Slide 18
Preprocessing
import tokenize

def preprocess(filename, readline):
    for token in tokenize.generate_tokens(readline):
        ...
        if inside_backquotes:
            # splice in tokens for a call to backquotes.shell(
            tokens.extend([
                (tokenize.NAME, 'backquotes'),
                (tokenize.OP, '.'),
                (tokenize.NAME, 'shell'),
                (tokenize.OP, '('),
            ])
        ...
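
The slide shows only the interesting splice. For reference, here is a self-contained sketch of the same idea that runs on its own: it rewrites each backquoted span such as `ls -l` into backquotes.shell('ls -l'). This is not the talk's actual implementation; the preprocess_source name, the capture of the command text as a string literal, and the reliance on the tokenizer reporting a stray backquote as an ERRORTOKEN (the behaviour of CPython's pure-Python tokenizer, up to 3.11; newer interpreters may reject the character outright) are assumptions for illustration.

import io
import tokenize

def preprocess_source(source):
    # Sketch only: rewrite `cmd` spans into backquotes.shell('cmd').
    lines = source.splitlines(keepends=True)
    readline = io.StringIO(source).readline
    tokens = []
    inside_backquotes = False
    command_start = None  # (row, col) just after the opening backquote

    for tok in tokenize.generate_tokens(readline):
        # A stray backquote is not valid Python 3, so the pure-Python
        # tokenizer reports it as an ERRORTOKEN with the string '`'.
        if tok.type == tokenize.ERRORTOKEN and tok.string == '`':
            if not inside_backquotes:
                inside_backquotes = True
                command_start = tok.end
            else:
                inside_backquotes = False
                # Slice the exact command text out of the source
                # (assumes the command fits on one line).
                row, col = command_start
                command = lines[row - 1][col:tok.start[1]].strip()
                # Splice in: backquotes.shell('<command>')
                tokens.extend([
                    (tokenize.NAME, 'backquotes'),
                    (tokenize.OP, '.'),
                    (tokenize.NAME, 'shell'),
                    (tokenize.OP, '('),
                    (tokenize.STRING, repr(command)),
                    (tokenize.OP, ')'),
                ])
            continue
        if inside_backquotes:
            continue  # drop the command's own tokens; re-emitted as a string above
        tokens.append((tok.type, tok.string))

    return tokenize.untokenize(tokens)

print(preprocess_source("files = `ls -l`\n"))
# prints something equivalent to: files = backquotes.shell('ls -l')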