from spacy.tokenizer import Tokenizer
from spacy.lang.en import English

# English() returns a blank English pipeline; its default tokenizer
# already includes the punctuation rules and exceptions for English.
vocab = English()

# Create a Tokenizer with the default settings for English
tokenizer = vocab.tokenizer

# `sentence` is the news text defined earlier in the article;
# tokenize it and print the resulting Token objects.
tokens = tokenizer(sentence)
print(list(tokens))

"""[Following, the, debut, of, Bitcoin, futures, ETF, in, the, United, States, ,, the,
crypto, market, is, abuzz, with, talks, of, an, impending, Ether, ETF.Speaking, on, a,
show, on, CNBC, ,, Michael, Sonnenshein, ,, CEO, of, Grayscale, --, an, asset,
management, company, with, $, 52, billion, in, assets, under, management, --, says,
it, is, possible, ., He, said, it, ', stands, to, reason, ', the, Securities, and,
Exchange, Committee, (, SEC, ), will, proactively, consider, bringing, Ethereum, ETF,
and, other, similar, products, in, the, US, market, ., Canada, already, has, Bitcoin,
,, Ethereum, ETFsWhile, US, regulators, have, allowed, Bitcoin, futures, ETF, to, be,
traded, on, the, exchanges, ,, Canada, has, allowed, both, Bitcoin, and, Ethereum,
ETFs, .]"""
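For a self-contained run, the minimal sketch below uses a short placeholder string in place of the article's `sentence`. It also shows that calling the blank pipeline directly produces the same tokens as calling its tokenizer, and that each printed item is a spaCy Token whose raw string is available via .text.

from spacy.lang.en import English

nlp = English()  # blank English pipeline; only the rule-based tokenizer runs

# Placeholder text; the article tokenizes the Bitcoin/Ethereum ETF news text here.
text = "Canada already has Bitcoin, Ethereum ETFs."

doc_from_pipeline = nlp(text)             # calling the pipeline runs the tokenizer
doc_from_tokenizer = nlp.tokenizer(text)  # calling the tokenizer directly

# Both paths yield the same token strings.
assert [t.text for t in doc_from_pipeline] == [t.text for t in doc_from_tokenizer]

for token in doc_from_pipeline:
    # Each item is a spacy.tokens.Token; .text gives the underlying string.
    print(token.text)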