From 3c1a60618c837ad7c19675b2e1626a480c194beb Mon Sep 17 00:00:00 2001
From: Martin
Date: Fri, 9 Oct 2015 16:35:59 +0200
Subject: Added exception handling to get_tokens functions.

---
 python/util.py | 11 +++++++----
 1 file changed, 7 insertions(+), 4 deletions(-)

diff --git a/python/util.py b/python/util.py
index 78fd362..ce89558 100644
--- a/python/util.py
+++ b/python/util.py
@@ -2,12 +2,15 @@

 import io
 import re
-from tokenize import tokenize
+from tokenize import tokenize, TokenError

 def get_tokens(code):
     """ Gets a list of tokens. """
-    stream = io.BytesIO(code.encode('utf-8'))
-    return [t.string for t in tokenize(stream.readline) if t.string]
+    try:
+        stream = io.BytesIO(code.encode('utf-8'))
+        return [t.string for t in tokenize(stream.readline) if t.string]
+    except TokenError:
+        return []

 # Check if tokens contain a sequence of tokens (given as a list of strings).
 def has_token_sequence(tokens, sequence):
@@ -37,7 +40,7 @@ def get_numbers(s):
         ''', s, re.VERBOSE)
     return [float(v) for v in str_vals]

-def string_almost_equal(s, a, prec=333):
+def string_almost_equal(s, a, prec=3):
     """ Searches string s for a value that is almost equal to a. """
     for v in get_numbers(s):
         if almost_equal(v, a, prec):
--
cgit v1.2.1
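
For context (not part of the patch itself): the snippet below is a self-contained sketch of how the patched get_tokens behaves. It duplicates the function exactly as added in the hunk above so it can run on its own; the example inputs and the commented outputs are assumptions about CPython 3's tokenize module, not something stated in the commit.

# Standalone sketch of the patched get_tokens, with two illustrative calls.
# The commented outputs are assumptions about CPython 3's tokenize module.
import io
from tokenize import tokenize, TokenError

def get_tokens(code):
    """ Gets a list of tokens. """
    try:
        stream = io.BytesIO(code.encode('utf-8'))
        return [t.string for t in tokenize(stream.readline) if t.string]
    except TokenError:
        return []

# Well-formed code tokenizes normally; the leading 'utf-8' comes from the
# ENCODING token that tokenize() yields first.
print(get_tokens("x = 1 + 2\n"))   # ['utf-8', 'x', '=', '1', '+', '2', '\n']

# An unterminated bracket makes tokenize() raise TokenError ("EOF in
# multi-line statement"). Before this patch the exception propagated to the
# caller; with the patch get_tokens returns an empty list instead.
print(get_tokens("x = (1 + 2\n"))  # []

Returning an empty list on TokenError means callers such as has_token_sequence simply see no tokens for malformed input rather than having to handle the exception themselves.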