Location via proxy:   [ UP ]  
[Report a bug]   [Manage cookies]                
aboutsummaryrefslogtreecommitdiffstats
diff options
context:
space:
mode:
authorFriedemann Kleint <Friedemann.Kleint@qt.io>2022-09-02 09:43:41 +0200
committerFriedemann Kleint <Friedemann.Kleint@qt.io>2022-09-02 12:16:45 +0200
commit065766883f5e34d20ca88084cd747813a84ed2d3 (patch)
tree2bca2c34d113f0198564155ac21789466d4b8b96 /tools/qtpy2cpp_lib/tokenizer.py
parent4cfa700d5965922b5156517a64790369deac354f (diff)
Move qtpy2cpp to sources/pyside-tools
Preparing the entry point. Task-number: PYSIDE-1945 Change-Id: I4a2fbe6d35b4f97bf0ab7cfc2085b86a40bc2558 Reviewed-by: Christian Tismer <tismer@stackless.com>
Diffstat (limited to 'tools/qtpy2cpp_lib/tokenizer.py')
-rw-r--r--tools/qtpy2cpp_lib/tokenizer.py55
1 files changed, 0 insertions, 55 deletions
diff --git a/tools/qtpy2cpp_lib/tokenizer.py b/tools/qtpy2cpp_lib/tokenizer.py
deleted file mode 100644
index d5e26c2a8..000000000
--- a/tools/qtpy2cpp_lib/tokenizer.py
+++ /dev/null
@@ -1,55 +0,0 @@
-# Copyright (C) 2022 The Qt Company Ltd.
-# SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
-
-"""Tool to dump Python Tokens"""
-
-
-import sys
-import tokenize
-
-
def format_token(t):
    """Return a compact one-line form of ``repr(t)`` for a tokenize token.

    The ``TokenInfo(`` prefix is stripped and the trailing ``line=...``
    attribute (whose repr may use either quote character) is cut off,
    keeping the closing parenthesis of the ``end=(...)`` tuple.
    """
    text = repr(t)
    prefix = 'TokenInfo('
    if text.startswith(prefix):
        text = text[len(prefix):]
    cut = text.find("), line='")
    if cut < 0:
        cut = text.find('), line="')
    if cut > 0:
        text = text[:cut + 1]
    return text
-
-
def first_non_space(s):
    """Return the index of the first non-space character of *s*.

    Returns 0 when *s* is empty or consists entirely of spaces.
    """
    return next((i for i, ch in enumerate(s) if ch != ' '), 0)
-
-
def _dump_tokens(filename):
    """Tokenize *filename* and print each source line followed by its tokens.

    Each source line is printed once, prefixed with a 4-digit line number and
    with its leading spaces visualized as underscores.  INDENT/DEDENT tokens
    are not printed themselves; they adjust the indentation used for the
    token printout so the token stream mirrors the code's nesting.
    """
    indent_level = 0
    indent = ''
    last_line_number = -1
    # tokenize.open() honors the file's PEP 263 coding cookie.
    with tokenize.open(filename) as f:
        for t in tokenize.generate_tokens(f.readline):
            line_number = t.start[0]
            if line_number != last_line_number:
                code_line = t.line.rstrip()
                non_space = first_non_space(code_line)
                print('{:04d} {}{}'.format(line_number, '_' * non_space,
                                           code_line[non_space:]))
                last_line_number = line_number
            if t.type == tokenize.INDENT:
                indent_level = indent_level + 1
                indent = ' ' * indent_level
            elif t.type == tokenize.DEDENT:
                indent_level = indent_level - 1
                indent = ' ' * indent_level
            else:
                print(' ', indent, format_token(t))


if __name__ == '__main__':
    if len(sys.argv) < 2:
        print("Specify file Name")
        sys.exit(1)
    _dump_tokens(sys.argv[1])