From 226dc59156329a0c5f4a3fef9e9162d83aaee4a5 Mon Sep 17 00:00:00 2001
From: youben11
Date: Fri, 15 Nov 2019 14:53:38 +0100
Subject: [PATCH] use backward compatible tokenizer func

---
 pyls/plugins/importmagic_lint.py | 15 +++++++--------
 1 file changed, 7 insertions(+), 8 deletions(-)

diff --git a/pyls/plugins/importmagic_lint.py b/pyls/plugins/importmagic_lint.py
index a13d7724..42108fac 100644
--- a/pyls/plugins/importmagic_lint.py
+++ b/pyls/plugins/importmagic_lint.py
@@ -3,7 +3,7 @@
 import re
 import sys
 import tokenize
-from io import BytesIO
+from io import StringIO
 from concurrent.futures import ThreadPoolExecutor
 import importmagic
 from pyls import hookimpl, lsp, _utils
@@ -66,12 +66,11 @@ def _get_imports_list(source, index=None):
 
 def _tokenize(source):
     """Tokenize python source code.
+    Returns only NAME tokens.
     """
-    stream = BytesIO(source.encode())
-    tokens = tokenize.tokenize(stream.readline)
-    if tokens is None:
-        return []
-    return list(tokens)
+    readline = StringIO(source).readline
+    filter_name = lambda token: token[0] == tokenize.NAME
+    return filter(filter_name, tokenize.generate_tokens(readline))
 
 
 def _search_symbol(source, symbol):
@@ -94,8 +93,8 @@ def _search_symbol(source, symbol):
         }
     }
     """
-    symbol_tokens = _tokenize(symbol)
-    source_tokens = _tokenize(source)
+    symbol_tokens = list(_tokenize(symbol))
+    source_tokens = list(_tokenize(source))
 
     get_str = lambda token: token[1]
     symbol_tokens_str = list(map(get_str, symbol_tokens))
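
Note (illustrative sketch, not part of the patch): the "backward compatible
tokenizer func" in the subject is tokenize.generate_tokens, which exists on
both Python 2 and Python 3 and consumes a str-based readline, whereas
Python 3's tokenize.tokenize only accepts a bytes readline. A minimal
standalone comparison, with a throwaway `source` string as the only
assumption:

    import tokenize
    from io import BytesIO, StringIO

    source = "import os\nprint(os.name)\n"

    # Python 3-only API: the source must be encoded to bytes first.
    byte_tokens = list(tokenize.tokenize(BytesIO(source.encode()).readline))

    # Backward compatible API: works directly on a str readline.
    str_tokens = list(tokenize.generate_tokens(StringIO(source).readline))

    # Keep only NAME tokens, mirroring what _tokenize() yields after this
    # patch (the tokenizer reports keywords like `import` as NAME tokens).
    names = [tok[1] for tok in str_tokens if tok[0] == tokenize.NAME]
    print(names)  # ['import', 'os', 'print', 'os', 'name']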