# Strip all comments from the Python source file named on the command line.
# Usage: python <this_script.py> <python_source_file>
import sys
from tokenize import tokenize, untokenize, COMMENT, NEWLINE, NL
from io import BytesIO

ret = []  # (token type, token string) pairs handed back to untokenize()
with open(sys.argv[1]) as f:
    for line in f:
        in_comment = False  # True once a comment has been seen on this line
        # Tokenize each physical line on its own; this assumes no multi-line
        # strings or backslash continuations span physical lines.
        for tok in tokenize(BytesIO(line.encode('utf-8')).readline):
            if tok.type == COMMENT:
                # Drop the comment token itself.
                in_comment = True
                continue
            elif in_comment and tok.type == NL:
                # Comment-only line: drop the trailing NL so the line vanishes.
                in_comment = False
                continue
            elif in_comment and tok.type == NEWLINE:
                # Trailing comment after code: keep the logical NEWLINE.
                in_comment = False
                ret.append((tok.type, tok.string))
                continue
            else:
                ret.append((tok.type, tok.string))
print(untokenize(ret).decode('utf-8'))
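
# Variant sketch, not part of the original paste: feed the whole file to
# tokenize() in one pass instead of line by line, so multi-line strings and
# backslash continuations are handled as well. The helper name strip_comments
# is an assumed name chosen here for illustration.

def strip_comments(source: str) -> str:
    result = []
    prev_was_comment = False
    for tok in tokenize(BytesIO(source.encode('utf-8')).readline):
        if tok.type == COMMENT:
            prev_was_comment = True
            continue
        if prev_was_comment and tok.type == NL:
            # Comment-only line: drop its NL so the whole line disappears.
            prev_was_comment = False
            continue
        prev_was_comment = False
        result.append((tok.type, tok.string))
    return untokenize(result).decode('utf-8')

# Example use of the variant (same command-line interface as above):
#   with open(sys.argv[1]) as f:
#       print(strip_comments(f.read()))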