#line 180 "python_tokeniser.ipk"
def tokenise(readline,
             tokeneater=printtoken,
             squashop=1, report_comments=1, split_multiline_strings=0):
    t = python_tokeniser(squashop, report_comments, split_multiline_strings)
    line = readline()
    while line:
        # feed one line, then hand any completed tokens to the token eater
        t.writeline(line)
        for token in t.tokens:
            apply(tokeneater, token)
        t.tokens = []
        line = readline()
    # end of input: feed an empty line so any pending tokens are flushed
    t.writeline('')
    for token in t.tokens:
        apply(tokeneater, token)
    t.tokens = []
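As a usage illustration (a minimal sketch, not part of the module itself, assuming the default printtoken token eater defined earlier), the tokeniser can be driven straight from an open file's readline method:

    f = open('example.py')
    tokenise(f.readline)        # prints every token via printtoken
    f.close()

Passing a different tokeneater callable instead of the default lets a caller collect or filter tokens rather than print them.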