您可以使用 tokenize.generate_tokens 解析 Python 代码来删除注释。
以下是对官方文档中示例稍作修改的版本:
import tokenize
import io
import sys

# Choose a file-like wrapper compatible with tokenize on this interpreter:
# Python 3 tokenizes text (StringIO); Python 2 tokenizes byte strings (BytesIO).
if sys.version_info[0] == 3:
    StringIO = io.StringIO
else:
    StringIO = io.BytesIO


def nocomment(s):
    """Return Python source *s* with all ``#`` comments removed.

    Docstrings are STRING tokens, not COMMENT tokens, so they are kept.
    Because ``tokenize.untokenize`` is given 2-tuples it runs in
    compatibility mode, so output whitespace may differ from the input
    (e.g. ``def foo ():``).

    :param s: Python source code as a single string.
    :return: the source with comment tokens dropped.
    :raises tokenize.TokenizeError: if *s* is not tokenizable.
    """
    result = []
    g = tokenize.generate_tokens(StringIO(s).readline)
    for toknum, tokval, _, _, _ in g:
        # Keep every token except comments, verbatim.
        if toknum != tokenize.COMMENT:
            result.append((toknum, tokval))
    return tokenize.untokenize(result)
# Script entry point: read script.py from the working directory and print
# it with comments stripped.  NOTE(review): relies on `nocomment` defined
# above and on script.py existing — no guard here, matching the original.
with open('script.py', 'r') as f:
    content = f.read()
print(nocomment(content))
例如:
如果script.py包含
def foo(): # Remove this comment
    ''' But do not remove this #1 docstring
    '''
    # Another comment
    pass
那么 nocomment 的输出
是
def foo ():
    ''' But do not remove this #1 docstring
    '''
    pass