#!/usr/bin/env python3
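"""Strip all comments from a Python source file.

The source is round-tripped through tokenize/untokenize with COMMENT
tokens dropped, so '#' characters inside string literals are preserved.
"""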
import argparse
import tokenize
import io
import sys


def remove_comments_from_code(source_code):
    """Return source_code with all comment tokens stripped."""
    tokens = tokenize.generate_tokens(io.StringIO(source_code).readline)
    # Drop COMMENT tokens and rebuild the source from the rest; trailing
    # whitespace may remain where a comment used to end a line.
    tokens_no_comments = [
        token for token in tokens if token.type != tokenize.COMMENT
    ]
    return tokenize.untokenize(tokens_no_comments)


def main():
    parser = argparse.ArgumentParser(
        description='Remove all comments from a Python file using tokenize.'
    )
    parser.add_argument(
        'input_file',
        help='Path to the target Python file.'
    )
    parser.add_argument(
        'output_file',
        nargs='?',
        help='Path to save the cleaned Python file. If omitted, prints to stdout.'
    )
    args = parser.parse_args()

    try:
        with open(args.input_file, 'r', encoding='utf-8') as f:
            source_code = f.read()
    except FileNotFoundError:
        print(f"Error: File not found: {args.input_file}", file=sys.stderr)
        sys.exit(1)
    except Exception as e:
        print(f"Error reading file: {e}", file=sys.stderr)
        sys.exit(1)

    try:
        cleaned_code = remove_comments_from_code(source_code)
    except (tokenize.TokenError, SyntaxError) as e:
        print(f"Error tokenizing file: {e}", file=sys.stderr)
        sys.exit(1)

    if args.output_file:
        try:
            with open(args.output_file, 'w', encoding='utf-8') as f:
                f.write(cleaned_code)
        except Exception as e:
            print(f"Error writing file: {e}", file=sys.stderr)
            sys.exit(1)
    else:
        print(cleaned_code)
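

# Example invocations (the script filename is illustrative):
#   python3 strip_comments.py my_module.py               # print cleaned source to stdout
#   python3 strip_comments.py my_module.py cleaned.py    # write cleaned source to cleaned.py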
if __name__ == '__main__':
    main()