# Construction 1
from spacy.tokenizer import Tokenizer
from spacy.lang.en import English

nlp = English()
# Create a blank Tokenizer with just the English vocab:
# no prefix/suffix/infix punctuation rules and no tokenizer exceptions.
tokenizer = Tokenizer(nlp.vocab)

# Construction 2
from spacy.lang.en import English

nlp = English()
# Create a Tokenizer with the default settings for English,
# including punctuation rules and exceptions.
# NOTE: nlp.Defaults.create_tokenizer(nlp) was removed in spaCy v3;
# the fully configured default tokenizer is exposed as nlp.tokenizer.
tokenizer = nlp.tokenizer
14
# Construction 1
from spacy.tokenizer import Tokenizer
from spacy.lang.en import English

nlp = English()
# Create a blank Tokenizer with just the English vocab:
# no prefix/suffix/infix punctuation rules and no tokenizer exceptions.
tokenizer = Tokenizer(nlp.vocab)

# Construction 2
from spacy.lang.en import English

nlp = English()
# Create a Tokenizer with the default settings for English,
# including punctuation rules and exceptions (spaCy v3 API).
tokenizer = nlp.tokenizer