6.16.10.1. Tokeniser interface

Start python section to interscript/tanglers/python.py[1/6] Next Last
     1: #line 14 "python_tangler.ipk"
     2: #---------------------------------------------------------
     3: # python tangler: write to a file, insert source line numbers
     4: # using '#line ' comments
     5: # works for Python
     6: from interscript.tanglers.base import tangler_base
     7: import re
     8: import string
     9: from interscript.tokenisers.python import python_tokeniser
    10: from interscript.tokenisers.python import COMMENT, \
    11:    MULTILINE_STRING_FIRST, \
    12:    MULTILINE_STRING_MIDDLE, \
    13:    MULTILINE_STRING_LAST
    14: import keyword
    15: import token
    16: 
    17: py_bracket_tokens = [  # grouping delimiters: ( ) [ ] { }
    18:   token.LPAR, token.RPAR,
    19:   token.LSQB, token.RSQB,
    20:   token.LBRACE, token.RBRACE]
    21: 
    22: py_punct_tokens = [  # statement/argument punctuation: : , ;
    23:   token.COLON, token.COMMA, token.SEMI]
    24: 
    25: py_op_tokens = [  # operator-class tokens, incl. the generic token.OP
    26:   token.OP,
    27:   token.PLUS, token.MINUS, token.STAR, token.SLASH,
    28:   token.VBAR, token.AMPER,
    29:   token.LESS, token.GREATER, token.EQUAL,
    30:   token.DOT, token.PERCENT,
    31:   token.BACKQUOTE, token.EQEQUAL,  # NOTE(review): BACKQUOTE is Python 2 only — confirm target interpreter
    32:   token.NOTEQUAL, token.LESSEQUAL, token.GREATEREQUAL,
    33:   token.TILDE, token.CIRCUMFLEX,
    34:   token.LEFTSHIFT,  token.RIGHTSHIFT, token.DOUBLESTAR]
    35: 
End python section to interscript/tanglers/python.py[1]