From 5e0a11d21183b7936a2dc33781f57110754d5a61 Mon Sep 17 00:00:00 2001
From: Christian Clauss
Date: Fri, 9 Feb 2024 12:44:19 +0100
Subject: [PATCH] Add ruff rules C4 and PERF for comprehensions and performance

---
 pyproject.toml                    |   7 +-
 src/whoosh/analysis/filters.py    |   2 +-
 src/whoosh/analysis/intraword.py  |  14 +-
 src/whoosh/analysis/morph.py      |   2 +-
 src/whoosh/analysis/ngrams.py     |   4 +-
 src/whoosh/analysis/tokenizers.py |   2 +-
 src/whoosh/automata/fst.py        |   2 +-
 src/whoosh/compat.py              |   2 +-
 src/whoosh/filedb/gae.py          |   5 +-
 src/whoosh/formats.py             |   6 +-
 src/whoosh/lang/lovins.py         | 618 +++++++++++++++---------------
 src/whoosh/multiproc.py           |   5 +-
 src/whoosh/qparser/default.py     |   2 +-
 src/whoosh/query/terms.py         |  14 +-
 src/whoosh/query/wrappers.py      |   2 +-
 src/whoosh/support/pyparsing.py   |  10 +-
 src/whoosh/util/varints.py        |   5 +-
 tests/test_dateparse.py           | 124 +++---
 tests/test_reading.py             |   2 +-
 tests/test_searching.py           |  20 +-
 tests/test_sorting.py             |   5 +-
 tests/test_writing.py             |   2 +-
 22 files changed, 424 insertions(+), 431 deletions(-)

diff --git a/pyproject.toml b/pyproject.toml
index 3da7ee29..86ee4c5b 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -6,8 +6,10 @@ select = [
   "AIR", # Airflow
   "ASYNC", # flake8-async
   "BLE", # flake8-blind-except
+  "C4", # flake8-comprehensions
   "C90", # McCabe cyclomatic complexity
   "DJ", # flake8-django
+  "DTZ", # flake8-datetimez
   "EXE", # flake8-executable
   "F", # Pyflakes
   "FA", # flake8-future-annotations
@@ -17,6 +19,7 @@ select = [
   "INT", # flake8-gettext
   "LOG", # flake8-logging
   "NPY", # NumPy-specific rules
+  "PERF", # Perflint
   "PLC", # Pylint conventions
   "PLE", # Pylint errors
   "PLR091", # Pylint Refactor just for max-args, max-branches, etc.
@@ -33,11 +36,9 @@ select = [
   # "ANN", # flake8-annotations
   # "ARG", # flake8-unused-arguments
   # "B", # flake8-bugbear
-  # "C4", # flake8-comprehensions
   # "COM", # flake8-commas
   # "CPY", # flake8-copyright
   # "D", # pydocstyle
-  # "DTZ", # flake8-datetimez
   # "E", # pycodestyle
   # "EM", # flake8-errmsg
   # "ERA", # eradicate
@@ -49,7 +50,6 @@ select = [
   # "ISC", # flake8-implicit-str-concat
   # "N", # pep8-naming
   # "PD", # pandas-vet
-  # "PERF", # Perflint
   # "PGH", # pygrep-hooks
   # "PIE", # flake8-pie
   # "PL", # Pylint
@@ -71,6 +71,7 @@ ignore = [
   "F401",
   "F811",
   "F841",
+  "PERF203",
   "UP031",
 ]
 
diff --git a/src/whoosh/analysis/filters.py b/src/whoosh/analysis/filters.py
index ee64bf9b..8a295cbf 100644
--- a/src/whoosh/analysis/filters.py
+++ b/src/whoosh/analysis/filters.py
@@ -392,7 +392,7 @@ class CharsetFilter(Filter):
     http://www.sphinxsearch.com/docs/current.html#conf-charset-table.
     """
 
-    __inittypes__ = dict(charmap=dict)
+    __inittypes__ = {"charmap": dict}
 
     def __init__(self, charmap):
         """
diff --git a/src/whoosh/analysis/intraword.py b/src/whoosh/analysis/intraword.py
index 06b99c25..61663edf 100644
--- a/src/whoosh/analysis/intraword.py
+++ b/src/whoosh/analysis/intraword.py
@@ -278,13 +278,13 @@ class IntraWordFilter(Filter):
 
     is_morph = True
 
-    __inittypes__ = dict(
-        delims=text_type,
-        splitwords=bool,
-        splitnums=bool,
-        mergewords=bool,
-        mergenums=bool,
-    )
+    __inittypes__ = {
+        "delims": text_type,
+        "splitwords": bool,
+        "splitnums": bool,
+        "mergewords": bool,
+        "mergenums": bool,
+    }
 
     def __init__(
         self,
diff --git a/src/whoosh/analysis/morph.py b/src/whoosh/analysis/morph.py
index 7b1944c1..982e5fa4 100644
--- a/src/whoosh/analysis/morph.py
+++ b/src/whoosh/analysis/morph.py
@@ -66,7 +66,7 @@ class StemFilter(Filter):
     stemmers in that library.
     """
 
-    __inittypes__ = dict(stemfn=object, ignore=list)
+    __inittypes__ = {"stemfn": object, "ignore": list}
 
     is_morph = True
 
diff --git a/src/whoosh/analysis/ngrams.py b/src/whoosh/analysis/ngrams.py
index 4281c1fa..423b4aa5 100644
--- a/src/whoosh/analysis/ngrams.py
+++ b/src/whoosh/analysis/ngrams.py
@@ -49,7 +49,7 @@ class NgramTokenizer(Tokenizer):
     could combine a RegexTokenizer with NgramFilter instead.
     """
 
-    __inittypes__ = dict(minsize=int, maxsize=int)
+    __inittypes__ = {"minsize": int, "maxsize": int}
 
     def __init__(self, minsize, maxsize=None):
         """
@@ -135,7 +135,7 @@ class NgramFilter(Filter):
     ["hell", "ello", "ther", "here"]
     """
 
-    __inittypes__ = dict(minsize=int, maxsize=int)
+    __inittypes__ = {"minsize": int, "maxsize": int}
 
     def __init__(self, minsize, maxsize=None, at=None):
         """
diff --git a/src/whoosh/analysis/tokenizers.py b/src/whoosh/analysis/tokenizers.py
index b76e7df5..58461b0c 100644
--- a/src/whoosh/analysis/tokenizers.py
+++ b/src/whoosh/analysis/tokenizers.py
@@ -222,7 +222,7 @@ class CharsetTokenizer(Tokenizer):
     http://www.sphinxsearch.com/docs/current.html#conf-charset-table.
     """
 
-    __inittype__ = dict(charmap=str)
+    __inittype__ = {"charmap": str}
 
     def __init__(self, charmap):
         """
diff --git a/src/whoosh/automata/fst.py b/src/whoosh/automata/fst.py
index 2566de35..907f36c2 100644
--- a/src/whoosh/automata/fst.py
+++ b/src/whoosh/automata/fst.py
@@ -1274,7 +1274,7 @@ def find_arc(self, address, label, arc=None):
     # Convenience methods
 
     def list_arcs(self, address):
-        return list(arc.copy() for arc in self.iter_arcs(address))
+        return [arc.copy() for arc in self.iter_arcs(address)]
 
     def arc_dict(self, address):
         return {arc.label: arc.copy() for arc in self.iter_arcs(address)}
diff --git a/src/whoosh/compat.py b/src/whoosh/compat.py
index 9bd790c7..f098d036 100644
--- a/src/whoosh/compat.py
+++ b/src/whoosh/compat.py
@@ -105,7 +105,7 @@ def u(s):
         return s
 
     def with_metaclass(meta, base=object):
-        ns = dict(base=base, meta=meta)
+        ns = {"base": base, "meta": meta}
         exec_(
             """class _WhooshBase(base, metaclass=meta):
     pass""",
diff --git a/src/whoosh/filedb/gae.py b/src/whoosh/filedb/gae.py
index 7ffca5ea..b2363c20 100644
--- a/src/whoosh/filedb/gae.py
+++ b/src/whoosh/filedb/gae.py
@@ -116,10 +116,7 @@ def open_index(self, indexname=_DEF_INDEX_NAME, schema=None):
 
     def list(self):
         query = DatastoreFile.all()
-        keys = []
-        for file in query:
-            keys.append(file.key().id_or_name())
-        return keys
+        return [file.key().id_or_name() for file in query]
 
     def clean(self):
         pass
diff --git a/src/whoosh/formats.py b/src/whoosh/formats.py
index ef36f195..7f9c39f2 100644
--- a/src/whoosh/formats.py
+++ b/src/whoosh/formats.py
@@ -57,7 +57,7 @@ class Format:
     posting_size = -1
     textual = True
 
-    __inittypes__ = dict(field_boost=float)
+    __inittypes__ = {"field_boost": float}
 
     def __init__(self, field_boost=1.0, **options):
         """
@@ -144,7 +144,7 @@ class Existence(Format):
     """
 
     posting_size = 0
-    __inittypes__ = dict(field_boost=float)
+    __inittypes__ = {"field_boost": float}
 
     def __init__(self, field_boost=1.0, **options):
         self.field_boost = field_boost
@@ -175,7 +175,7 @@ class Frequency(Format):
     """
 
     posting_size = _INT_SIZE
-    __inittypes__ = dict(field_boost=float, boost_as_freq=bool)
+    __inittypes__ = {"field_boost": float, "boost_as_freq": bool}
 
     def __init__(self, field_boost=1.0, boost_as_freq=False, **options):
         """
diff --git a/src/whoosh/lang/lovins.py b/src/whoosh/lang/lovins.py
index e3b114ba..112ee9fe 100644
--- a/src/whoosh/lang/lovins.py
+++ b/src/whoosh/lang/lovins.py
@@ -186,324 +186,306 @@ def c(base):
 
 
 m = [None] * 12
-m[11] = dict((("alistically", B), ("arizability", A), ("izationally", B)))
-m[10] = dict(
-    (("antialness", A), ("arisations", A), ("arizations", A), ("entialness", A))
-)
-m[9] = dict(
-    (
-        ("allically", C),
-        ("antaneous", A),
-        ("antiality", A),
-        ("arisation", A),
-        ("arization", A),
-        ("ationally", B),
-        ("ativeness", A),
-        ("eableness", E),
-        ("entations", A),
-        ("entiality", A),
-        ("entialize", A),
-        ("entiation", A),
-        ("ionalness", A),
-        ("istically", A),
-        ("itousness", A),
-        ("izability", A),
-        ("izational", A),
-    )
-)
-m[8] = dict(
-    (
-        ("ableness", A),
-        ("arizable", A),
-        ("entation", A),
-        ("entially", A),
-        ("eousness", A),
-        ("ibleness", A),
-        ("icalness", A),
-        ("ionalism", A),
-        ("ionality", A),
-        ("ionalize", A),
-        ("iousness", A),
-        ("izations", A),
-        ("lessness", A),
-    )
-)
-m[7] = dict(
-    (
-        ("ability", A),
-        ("aically", A),
-        ("alistic", B),
-        ("alities", A),
-        ("ariness", E),
-        ("aristic", A),
-        ("arizing", A),
-        ("ateness", A),
-        ("atingly", A),
-        ("ational", B),
-        ("atively", A),
-        ("ativism", A),
-        ("elihood", E),
-        ("encible", A),
-        ("entally", A),
-        ("entials", A),
-        ("entiate", A),
-        ("entness", A),
-        ("fulness", A),
-        ("ibility", A),
-        ("icalism", A),
-        ("icalist", A),
-        ("icality", A),
-        ("icalize", A),
-        ("ication", G),
-        ("icianry", A),
-        ("ination", A),
-        ("ingness", A),
-        ("ionally", A),
-        ("isation", A),
-        ("ishness", A),
-        ("istical", A),
-        ("iteness", A),
-        ("iveness", A),
-        ("ivistic", A),
-        ("ivities", A),
-        ("ization", F),
-        ("izement", A),
-        ("oidally", A),
-        ("ousness", A),
-    )
-)
-m[6] = dict(
-    (
-        ("aceous", A),
-        ("acious", B),
-        ("action", G),
-        ("alness", A),
-        ("ancial", A),
-        ("ancies", A),
-        ("ancing", B),
-        ("ariser", A),
-        ("arized", A),
-        ("arizer", A),
-        ("atable", A),
-        ("ations", B),
-        ("atives", A),
-        ("eature", Z),
-        ("efully", A),
-        ("encies", A),
-        ("encing", A),
-        ("ential", A),
-        ("enting", C),
-        ("entist", A),
-        ("eously", A),
-        ("ialist", A),
-        ("iality", A),
-        ("ialize", A),
-        ("ically", A),
-        ("icance", A),
-        ("icians", A),
-        ("icists", A),
-        ("ifully", A),
-        ("ionals", A),
-        ("ionate", D),
-        ("ioning", A),
-        ("ionist", A),
-        ("iously", A),
-        ("istics", A),
-        ("izable", E),
-        ("lessly", A),
-        ("nesses", A),
-        ("oidism", A),
-    )
-)
-m[5] = dict(
-    (
-        ("acies", A),
-        ("acity", A),
-        ("aging", B),
-        ("aical", A),
-        ("alist", A),
-        ("alism", B),
-        ("ality", A),
-        ("alize", A),
-        ("allic", b),
-        ("anced", B),
-        ("ances", B),
-        ("antic", C),
-        ("arial", A),
-        ("aries", A),
-        ("arily", A),
-        ("arity", B),
-        ("arize", A),
-        ("aroid", A),
-        ("ately", A),
-        ("ating", I),
-        ("ation", B),
-        ("ative", A),
-        ("ators", A),
-        ("atory", A),
-        ("ature", E),
-        ("early", Y),
-        ("ehood", A),
-        ("eless", A),
-        ("elily", A),
-        ("ement", A),
-        ("enced", A),
-        ("ences", A),
-        ("eness", E),
-        ("ening", E),
-        ("ental", A),
-        ("ented", C),
-        ("ently", A),
-        ("fully", A),
-        ("ially", A),
-        ("icant", A),
-        ("ician", A),
-        ("icide", A),
-        ("icism", A),
-        ("icist", A),
-        ("icity", A),
-        ("idine", I),
-        ("iedly", A),
-        ("ihood", A),
-        ("inate", A),
-        ("iness", A),
-        ("ingly", B),
-        ("inism", J),
-        ("inity", c),
-        ("ional", A),
-        ("ioned", A),
-        ("ished", A),
-        ("istic", A),
-        ("ities", A),
-        ("itous", A),
-        ("ively", A),
-        ("ivity", A),
-        ("izers", F),
-        ("izing", F),
-        ("oidal", A),
-        ("oides", A),
-        ("otide", A),
-        ("ously", A),
-    )
-)
-m[4] = dict(
-    (
-        ("able", A),
-        ("ably", A),
-        ("ages", B),
-        ("ally", B),
-        ("ance", B),
-        ("ancy", B),
-        ("ants", B),
-        ("aric", A),
-        ("arly", K),
-        ("ated", I),
-        ("ates", A),
-        ("atic", B),
-        ("ator", A),
-        ("ealy", Y),
-        ("edly", E),
-        ("eful", A),
-        ("eity", A),
-        ("ence", A),
-        ("ency", A),
-        ("ened", E),
-        ("enly", E),
-        ("eous", A),
-        ("hood", A),
-        ("ials", A),
-        ("ians", A),
-        ("ible", A),
-        ("ibly", A),
-        ("ical", A),
-        ("ides", L),
-        ("iers", A),
-        ("iful", A),
-        ("ines", M),
-        ("ings", N),
-        ("ions", B),
-        ("ious", A),
-        ("isms", B),
-        ("ists", A),
-        ("itic", H),
-        ("ized", F),
-        ("izer", F),
-        ("less", A),
-        ("lily", A),
-        ("ness", A),
-        ("ogen", A),
-        ("ward", A),
-        ("wise", A),
-        ("ying", B),
-        ("yish", A),
-    )
-)
-m[3] = dict(
-    (
-        ("acy", A),
-        ("age", B),
-        ("aic", A),
-        ("als", b),
-        ("ant", B),
-        ("ars", O),
-        ("ary", F),
-        ("ata", A),
-        ("ate", A),
-        ("eal", Y),
-        ("ear", Y),
-        ("ely", E),
-        ("ene", E),
-        ("ent", C),
-        ("ery", E),
-        ("ese", A),
-        ("ful", A),
-        ("ial", A),
-        ("ian", A),
-        ("ics", A),
-        ("ide", L),
-        ("ied", A),
-        ("ier", A),
-        ("ies", P),
-        ("ily", A),
-        ("ine", M),
-        ("ing", N),
-        ("ion", Q),
-        ("ish", C),
-        ("ism", B),
-        ("ist", A),
-        ("ite", a),
-        ("ity", A),
-        ("ium", A),
-        ("ive", A),
-        ("ize", F),
-        ("oid", A),
-        ("one", R),
-        ("ous", A),
-    )
-)
-m[2] = dict(
-    (
-        ("ae", A),
-        ("al", b),
-        ("ar", X),
-        ("as", B),
-        ("ed", E),
-        ("en", F),
-        ("es", E),
-        ("ia", A),
-        ("ic", A),
-        ("is", A),
-        ("ly", B),
-        ("on", S),
-        ("or", T),
-        ("um", U),
-        ("us", V),
-        ("yl", R),
-        ("s'", A),
-        ("'s", A),
-    )
-)
-m[1] = dict((("a", A), ("e", A), ("i", A), ("o", A), ("s", W), ("y", B)))
+m[11] = {"alistically": B, "arizability": A, "izationally": B}
+m[10] = {"antialness": A, "arisations": A, "arizations": A, "entialness": A}
+m[9] = {
+    "allically": C,
+    "antaneous": A,
+    "antiality": A,
+    "arisation": A,
+    "arization": A,
+    "ationally": B,
+    "ativeness": A,
+    "eableness": E,
+    "entations": A,
+    "entiality": A,
+    "entialize": A,
+    "entiation": A,
+    "ionalness": A,
+    "istically": A,
+    "itousness": A,
+    "izability": A,
+    "izational": A,
+}
+m[8] = {
+    "ableness": A,
+    "arizable": A,
+    "entation": A,
+    "entially": A,
+    "eousness": A,
+    "ibleness": A,
+    "icalness": A,
+    "ionalism": A,
+    "ionality": A,
+    "ionalize": A,
+    "iousness": A,
+    "izations": A,
+    "lessness": A,
+}
+m[7] = {
+    "ability": A,
+    "aically": A,
+    "alistic": B,
+    "alities": A,
+    "ariness": E,
+    "aristic": A,
+    "arizing": A,
+    "ateness": A,
+    "atingly": A,
+    "ational": B,
+    "atively": A,
+    "ativism": A,
+    "elihood": E,
+    "encible": A,
+    "entally": A,
+    "entials": A,
+    "entiate": A,
+    "entness": A,
+    "fulness": A,
+    "ibility": A,
+    "icalism": A,
+    "icalist": A,
+    "icality": A,
+    "icalize": A,
+    "ication": G,
+    "icianry": A,
+    "ination": A,
+    "ingness": A,
+    "ionally": A,
+    "isation": A,
+    "ishness": A,
+    "istical": A,
+    "iteness": A,
+    "iveness": A,
+    "ivistic": A,
+    "ivities": A,
+    "ization": F,
+    "izement": A,
+    "oidally": A,
+    "ousness": A,
+}
+m[6] = {
+    "aceous": A,
+    "acious": B,
+    "action": G,
+    "alness": A,
+    "ancial": A,
+    "ancies": A,
+    "ancing": B,
+    "ariser": A,
+    "arized": A,
+    "arizer": A,
+    "atable": A,
+    "ations": B,
+    "atives": A,
+    "eature": Z,
+    "efully": A,
+    "encies": A,
+    "encing": A,
+    "ential": A,
+    "enting": C,
+    "entist": A,
+    "eously": A,
+    "ialist": A,
+    "iality": A,
+    "ialize": A,
+    "ically": A,
+    "icance": A,
+    "icians": A,
+    "icists": A,
+    "ifully": A,
+    "ionals": A,
+    "ionate": D,
+    "ioning": A,
+    "ionist": A,
+    "iously": A,
+    "istics": A,
+    "izable": E,
+    "lessly": A,
+    "nesses": A,
+    "oidism": A,
+}
+m[5] = {
+    "acies": A,
+    "acity": A,
+    "aging": B,
+    "aical": A,
+    "alist": A,
+    "alism": B,
+    "ality": A,
+    "alize": A,
+    "allic": b,
+    "anced": B,
+    "ances": B,
+    "antic": C,
+    "arial": A,
+    "aries": A,
+    "arily": A,
+    "arity": B,
+    "arize": A,
+    "aroid": A,
+    "ately": A,
+    "ating": I,
+    "ation": B,
+    "ative": A,
+    "ators": A,
+    "atory": A,
+    "ature": E,
+    "early": Y,
+    "ehood": A,
+    "eless": A,
+    "elily": A,
+    "ement": A,
+    "enced": A,
+    "ences": A,
+    "eness": E,
+    "ening": E,
+    "ental": A,
+    "ented": C,
+    "ently": A,
+    "fully": A,
+    "ially": A,
+    "icant": A,
+    "ician": A,
+    "icide": A,
+    "icism": A,
+    "icist": A,
+    "icity": A,
+    "idine": I,
+    "iedly": A,
+    "ihood": A,
+    "inate": A,
+    "iness": A,
+    "ingly": B,
+    "inism": J,
+    "inity": c,
+    "ional": A,
+    "ioned": A,
+    "ished": A,
+    "istic": A,
+    "ities": A,
+    "itous": A,
+    "ively": A,
+    "ivity": A,
+    "izers": F,
+    "izing": F,
+    "oidal": A,
+    "oides": A,
+    "otide": A,
+    "ously": A,
+}
+m[4] = {
+    "able": A,
+    "ably": A,
+    "ages": B,
+    "ally": B,
+    "ance": B,
+    "ancy": B,
+    "ants": B,
+    "aric": A,
+    "arly": K,
+    "ated": I,
+    "ates": A,
+    "atic": B,
+    "ator": A,
+    "ealy": Y,
+    "edly": E,
+    "eful": A,
+    "eity": A,
+    "ence": A,
+    "ency": A,
+    "ened": E,
+    "enly": E,
+    "eous": A,
+    "hood": A,
+    "ials": A,
+    "ians": A,
+    "ible": A,
+    "ibly": A,
+    "ical": A,
+    "ides": L,
+    "iers": A,
+    "iful": A,
+    "ines": M,
+    "ings": N,
+    "ions": B,
+    "ious": A,
+    "isms": B,
+    "ists": A,
+    "itic": H,
+    "ized": F,
+    "izer": F,
+    "less": A,
+    "lily": A,
+    "ness": A,
+    "ogen": A,
+    "ward": A,
+    "wise": A,
+    "ying": B,
+    "yish": A,
+}
+m[3] = {
+    "acy": A,
+    "age": B,
+    "aic": A,
+    "als": b,
+    "ant": B,
+    "ars": O,
+    "ary": F,
+    "ata": A,
+    "ate": A,
+    "eal": Y,
+    "ear": Y,
+    "ely": E,
+    "ene": E,
+    "ent": C,
+    "ery": E,
+    "ese": A,
+    "ful": A,
+    "ial": A,
+    "ian": A,
+    "ics": A,
+    "ide": L,
+    "ied": A,
+    "ier": A,
+    "ies": P,
+    "ily": A,
+    "ine": M,
+    "ing": N,
+    "ion": Q,
+    "ish": C,
+    "ism": B,
+    "ist": A,
+    "ite": a,
+    "ity": A,
+    "ium": A,
+    "ive": A,
+    "ize": F,
+    "oid": A,
+    "one": R,
+    "ous": A,
+}
+m[2] = {
+    "ae": A,
+    "al": b,
+    "ar": X,
+    "as": B,
+    "ed": E,
+    "en": F,
+    "es": E,
+    "ia": A,
+    "ic": A,
+    "is": A,
+    "ly": B,
+    "on": S,
+    "or": T,
+    "um": U,
+    "us": V,
+    "yl": R,
+    "s'": A,
+    "'s": A,
+}
+m[1] = {"a": A, "e": A, "i": A, "o": A, "s": W, "y": B}
 
 
 def remove_ending(word):
diff --git a/src/whoosh/multiproc.py b/src/whoosh/multiproc.py
index 36264e2e..9b0e44b3 100644
--- a/src/whoosh/multiproc.py
+++ b/src/whoosh/multiproc.py
@@ -377,10 +377,7 @@ def _commit(self, mergetype, optimize, merge):
         # Merge existing segments
         finalsegments = self._merge_segments(mergetype, optimize, merge)
 
-        results = []
-        for writer in self.tasks:
-            results.append(finish_subsegment(writer))
-
+        results = [finish_subsegment(writer) for writer in self.tasks]
         self._merge_subsegments(results, mergetype)
         self._close_segment()
         self._assemble_segment()
diff --git a/src/whoosh/qparser/default.py b/src/whoosh/qparser/default.py
index 2b028e53..30416872 100644
--- a/src/whoosh/qparser/default.py
+++ b/src/whoosh/qparser/default.py
@@ -164,7 +164,7 @@ def _priorized(self, methodname):
             # Call either .taggers() or .filters() on the plugin
             method = getattr(plugin, methodname)
             for item in method(self):
-                items_and_priorities.append(item)
+                items_and_priorities.append(item)  # noqa: PERF402
         # Sort the list by priority (lower priority runs first)
         items_and_priorities.sort(key=lambda x: x[1])
         # Return the sorted list without the priorities
diff --git a/src/whoosh/query/terms.py b/src/whoosh/query/terms.py
index cffd471b..b7ae92f7 100644
--- a/src/whoosh/query/terms.py
+++ b/src/whoosh/query/terms.py
@@ -43,7 +43,7 @@ class Term(qcore.Query):
    >>> Term("content", u"render")
     """
 
-    __inittypes__ = dict(fieldname=str, text=text_type, boost=float)
+    __inittypes__ = {"fieldname": str, "text": text_type, "boost": float}
 
     def __init__(self, fieldname, text, boost=1.0, minquality=None):
         self.fieldname = fieldname
@@ -247,7 +247,7 @@ def matcher(self, searcher, context=None):
 class PatternQuery(MultiTerm):
     """An intermediate base class for common methods of Prefix and Wildcard."""
 
-    __inittypes__ = dict(fieldname=str, text=text_type, boost=float)
+    __inittypes__ = {"fieldname": str, "text": text_type, "boost": float}
 
     def __init__(self, fieldname, text, boost=1.0, constantscore=True):
         self.fieldname = fieldname
@@ -445,9 +445,13 @@ def terms(self, phrases=False):
 class FuzzyTerm(ExpandingTerm):
     """Matches documents containing words similar to the given term."""
 
-    __inittypes__ = dict(
-        fieldname=str, text=text_type, boost=float, maxdist=float, prefixlength=int
-    )
+    __inittypes__ = {
+        "fieldname": str,
+        "text": text_type,
+        "boost": float,
+        "maxdist": float,
+        "prefixlength": int,
+    }
 
     def __init__(
         self, fieldname, text, boost=1.0, maxdist=1, prefixlength=1, constantscore=True
diff --git a/src/whoosh/query/wrappers.py b/src/whoosh/query/wrappers.py
index a3fc62eb..531f7a08 100644
--- a/src/whoosh/query/wrappers.py
+++ b/src/whoosh/query/wrappers.py
@@ -84,7 +84,7 @@ class Not(qcore.Query):
     >>> Term("content", u"render") - Term("content", u"texture")
     """
 
-    __inittypes__ = dict(query=qcore.Query)
+    __inittypes__ = {"query": qcore.Query}
 
     def __init__(self, query, boost=1.0):
         """
diff --git a/src/whoosh/support/pyparsing.py b/src/whoosh/support/pyparsing.py
index 8db43458..82d53c2d 100644
--- a/src/whoosh/support/pyparsing.py
+++ b/src/whoosh/support/pyparsing.py
@@ -455,13 +455,13 @@ def __getitem__(self, i):
 
     def __setitem__(self, k, v):
         if isinstance(v, _ParseResultsWithOffset):
-            self.__tokdict[k] = self.__tokdict.get(k, list()) + [v]
+            self.__tokdict[k] = self.__tokdict.get(k, []) + [v]
             sub = v[0]
         elif isinstance(k, int):
             self.__toklist[k] = v
             sub = v
         else:
-            self.__tokdict[k] = self.__tokdict.get(k, list()) + [
+            self.__tokdict[k] = self.__tokdict.get(k, []) + [
                 _ParseResultsWithOffset(v, 0)
             ]
             sub = v
@@ -2940,9 +2940,9 @@ def parseImpl(self, instring, loc, doActions=True):
         )
 
         # add any unmatched Optionals, in case they have default values defined
-        matchOrder += list(
+        matchOrder += [
             e for e in self.exprs if isinstance(e, Optional) and e.expr in tmpOpt
-        )
+        ]
 
         resultlist = []
         for e in matchOrder:
@@ -3982,7 +3982,7 @@ def withAttribute(*args, **attrDict):
         attrs = args[:]
     else:
         attrs = attrDict.items()
-    attrs = [(k, v) for k, v in attrs]
+    attrs = list(attrs)
 
     def pa(s, l, tokens):
         for attrName, attrValue in attrs:
diff --git a/src/whoosh/util/varints.py b/src/whoosh/util/varints.py
index 778783d3..6bc58ba8 100644
--- a/src/whoosh/util/varints.py
+++ b/src/whoosh/util/varints.py
@@ -46,10 +46,7 @@ def _varint(i):
 
 
 _varint_cache_size = 512
-_varint_cache = []
-for i in range(0, _varint_cache_size):
-    _varint_cache.append(_varint(i))
-_varint_cache = tuple(_varint_cache)
+_varint_cache = tuple([_varint(i) for i in range(_varint_cache_size)])
 
 
 def varint(i):
diff --git a/tests/test_dateparse.py b/tests/test_dateparse.py
index 9acba143..6e0ca739 100644
--- a/tests/test_dateparse.py
+++ b/tests/test_dateparse.py
@@ -291,53 +291,53 @@ def test_bundle(p=english.bundle):
 def test_ranges(p=english.torange):
     assert_timespan(
         p.date_from("last tuesday to next tuesday", basedate),
-        dict(year=2010, month=9, day=14),
-        dict(year=2010, month=9, day=21),
+        {"year": 2010, "month": 9, "day": 14},
+        {"year": 2010, "month": 9, "day": 21},
     )
     assert_timespan(
         p.date_from("last monday to dec 25", basedate),
-        dict(year=2010, month=9, day=13),
-        dict(year=None, month=12, day=25),
+        {"year": 2010, "month": 9, "day": 13},
+        {"year": None, "month": 12, "day": 25},
     )
     assert_timespan(
         p.date_from("oct 25 to feb 14", basedate),
-        dict(year=None, month=10, day=25),
-        dict(year=None, month=2, day=14),
+        {"year": None, "month": 10, "day": 25},
+        {"year": None, "month": 2, "day": 14},
     )
     assert_timespan(
         p.date_from("3am oct 12 to 5pm", basedate),
-        dict(year=None, month=10, day=12, hour=3),
-        dict(year=None, month=None, day=None, hour=17),
+        {"year": None, "month": 10, "day": 12, "hour": 3},
+        {"year": None, "month": None, "day": None, "hour": 17},
     )
     assert_timespan(
         p.date_from("3am feb 12 to 5pm today", basedate),
-        dict(year=None, month=2, day=12, hour=3),
-        dict(year=2010, month=9, day=20, hour=17),
+        {"year": None, "month": 2, "day": 12, "hour": 3},
+        {"year": 2010, "month": 9, "day": 20, "hour": 17},
    )
     assert_timespan(
         p.date_from("feb to oct", basedate),
-        dict(year=None, month=2),
-        dict(year=None, month=10),
+        {"year": None, "month": 2},
+        {"year": None, "month": 10},
     )
     assert_timespan(
         p.date_from("oct 25 2005 11am to 5pm tomorrow", basedate),
-        dict(year=2005, month=10, day=25, hour=11),
-        dict(year=2010, month=9, day=21, hour=17),
+        {"year": 2005, "month": 10, "day": 25, "hour": 11},
+        {"year": 2010, "month": 9, "day": 21, "hour": 17},
     )
     assert_timespan(
         p.date_from("oct 5 2005 to november 20", basedate),
-        dict(year=2005, month=10, day=5),
-        dict(year=None, month=11, day=20),
+        {"year": 2005, "month": 10, "day": 5},
+        {"year": None, "month": 11, "day": 20},
     )
     assert_timespan(
         p.date_from("2007 to 2010", basedate),
-        dict(year=2007, month=None, day=None),
-        dict(year=2010, month=None, day=None),
+        {"year": 2007, "month": None, "day": None},
+        {"year": 2010, "month": None, "day": None},
     )
     assert_timespan(
         p.date_from("2007 to oct 12", basedate),
-        dict(year=2007, month=None, day=None),
-        dict(year=None, month=10, day=12),
+        {"year": 2007, "month": None, "day": None},
+        {"year": None, "month": 10, "day": 12},
     )
 
     assert_datespan(
@@ -367,110 +367,122 @@ def test_final_dates(p=english):
 def test_final_ranges(p=english):
     assert_unamb_span(
         p.date_from("feb to nov", basedate),
-        dict(year=2010, month=2),
-        dict(year=2010, month=11),
+        {"year": 2010, "month": 2},
+        {"year": 2010, "month": 11},
     )
 
     # 2005 to 10 oct 2009 -> jan 1 2005 to oct 31 2009
     assert_unamb_span(
         p.date_from("2005 to 10 oct 2009", basedate),
-        dict(year=2005),
-        dict(year=2009, month=10, day=10),
+        {"year": 2005},
+        {"year": 2009, "month": 10, "day": 10},
     )
 
     # jan 12 to oct 10 2009 -> jan 12 2009 to oct 10 2009
     assert_unamb_span(
         p.date_from("jan 12 to oct 10 2009", basedate),
-        dict(year=2009, month=1, day=12),
-        dict(year=2009, month=10, day=10),
+        {"year": 2009, "month": 1, "day": 12},
+        {"year": 2009, "month": 10, "day": 10},
     )
 
     # jan to oct 2009 -> jan 1 2009 to oct 31 2009
     assert_unamb_span(
         p.date_from("jan to oct 2009", basedate),
-        dict(year=2009, month=1),
-        dict(year=2009, month=10, day=31),
+        {"year": 2009, "month": 1},
+        {"year": 2009, "month": 10, "day": 31},
     )
 
     # mar 2005 to oct -> mar 1 2005 to oct 31 basedate.year
     assert_unamb_span(
         p.date_from("mar 2005 to oct", basedate),
-        dict(year=2005, month=3),
-        dict(year=2010, month=10, day=31),
+        {"year": 2005, "month": 3},
+        {"year": 2010, "month": 10, "day": 31},
     )
 
     # jan 10 to jan 25 -> jan 10 basedate.year to jan 25 basedate.year
     assert_unamb_span(
         p.date_from("jan 10 to jan 25", basedate),
-        dict(year=2010, month=1, day=10),
-        dict(year=2010, month=1, day=25),
+        {"year": 2010, "month": 1, "day": 10},
+        {"year": 2010, "month": 1, "day": 25},
     )
 
     # jan 2005 to feb 2009 -> jan 1 2005 to feb 28 2009
     assert_unamb_span(
         p.date_from("jan 2005 to feb 2009", basedate),
-        dict(year=2005, month=1),
-        dict(year=2009, month=2),
+        {"year": 2005, "month": 1},
+        {"year": 2009, "month": 2},
     )
 
     # jan 5000 to mar -> jan 1 5000 to mar 5000
     assert_unamb_span(
         p.date_from("jan 5000 to mar", basedate),
-        dict(year=5000, month=1),
-        dict(year=5000, month=3),
+        {"year": 5000, "month": 1},
+        {"year": 5000, "month": 3},
     )
 
     # jun 5000 to jan -> jun 1 5000 to jan 31 5001
     assert_unamb_span(
         p.date_from("jun 5000 to jan", basedate),
-        dict(year=5000, month=6),
-        dict(year=5001, month=1),
+        {"year": 5000, "month": 6},
+        {"year": 5001, "month": 1},
     )
 
     # oct 2010 to feb -> oct 1 2010 to feb 28 2011
     assert_unamb_span(
         p.date_from("oct 2010 to feb", basedate),
-        dict(year=2010, month=10),
-        dict(year=2011, month=2),
+        {"year": 2010, "month": 10},
+        {"year": 2011, "month": 2},
     )
 
     assert_unamb_span(
         p.date_from("5pm to 3am", basedate),
-        dict(year=2010, month=9, day=20, hour=17),
-        dict(year=2010, month=9, day=21, hour=3),
+        {"year": 2010, "month": 9, "day": 20, "hour": 17},
+        {"year": 2010, "month": 9, "day": 21, "hour": 3},
     )
 
     assert_unamb_span(
         p.date_from("5am to 3 am tomorrow", basedate),
-        dict(year=2010, month=9, day=20, hour=5),
-        dict(year=2010, month=9, day=21, hour=3),
+        {"year": 2010, "month": 9, "day": 20, "hour": 5},
+        {"year": 2010, "month": 9, "day": 21, "hour": 3},
    )
 
     assert_unamb_span(
         p.date_from("3am to 5 pm tomorrow", basedate),
-        dict(year=2010, month=9, day=21, hour=3),
-        dict(year=2010, month=9, day=21, hour=17),
+        {"year": 2010, "month": 9, "day": 21, "hour": 3},
+        {"year": 2010, "month": 9, "day": 21, "hour": 17},
     )
 
     assert_unamb_span(
         p.date_from("-2hrs to +20min", basedate),
-        dict(
-            year=2010, month=9, day=20, hour=13, minute=16, second=6, microsecond=454000
-        ),
-        dict(
-            year=2010, month=9, day=20, hour=15, minute=36, second=6, microsecond=454000
-        ),
+        {
+            "year": 2010,
+            "month": 9,
+            "day": 20,
+            "hour": 13,
+            "minute": 16,
+            "second": 6,
+            "microsecond": 454000,
+        },
+        {
+            "year": 2010,
+            "month": 9,
+            "day": 20,
+            "hour": 15,
+            "minute": 36,
+            "second": 6,
+            "microsecond": 454000,
+        },
     )
 
     # Swap
     assert_unamb_span(
         p.date_from("oct 25 2009 to feb 14 2008", basedate),
-        dict(year=2008, month=2, day=14),
-        dict(year=2009, month=10, day=25),
+        {"year": 2008, "month": 2, "day": 14},
+        {"year": 2009, "month": 10, "day": 25},
     )
 
     assert_unamb_span(
         p.date_from("oct 25 5000 to tomorrow", basedate),
-        dict(year=2010, month=9, day=21),
-        dict(year=5000, month=10, day=25),
+        {"year": 2010, "month": 9, "day": 21},
+        {"year": 5000, "month": 10, "day": 25},
     )
diff --git a/tests/test_reading.py b/tests/test_reading.py
index 8311c66a..b7a4a9df 100644
--- a/tests/test_reading.py
+++ b/tests/test_reading.py
@@ -288,7 +288,7 @@ def test_all_stored_fields():
     with ix.searcher() as s:
         assert s.doc_count_all() == 4
         assert s.doc_count() == 2
-        sfs = list((sf["a"], sf["b"]) for sf in s.all_stored_fields())
+        sfs = [(sf["a"], sf["b"]) for sf in s.all_stored_fields()]
         assert sfs == [("alfa", "bravo"), ("alpaca", "beagle")]
 
 
diff --git a/tests/test_searching.py b/tests/test_searching.py
index 3b4c7e56..aa8410c0 100644
--- a/tests/test_searching.py
+++ b/tests/test_searching.py
@@ -937,7 +937,7 @@ def test_find_missing():
         qp = qparser.QueryParser("text", schema)
         q = qp.parse(u("NOT id:*"))
         r = s.search(q, limit=None)
-        assert list(h["text"] for h in r) == ["charlie", "echo", "golf"]
+        assert [h["text"] for h in r] == ["charlie", "echo", "golf"]
 
 
 def test_ngram_phrase():
@@ -1537,9 +1537,12 @@ def check(kwargs, target):
     rating = sorting.FieldFacet("rating", reverse=True)
     tag = sorting.FieldFacet("tag")
 
-    check(dict(sortedby=price), "h b l c e g i k j d f a")
-    check(dict(sortedby=price, collapse=tag), "h b l c e d")
-    check(dict(sortedby=price, collapse=tag, collapse_order=rating), "h b l i k d")
+    check({"sortedby": price}, "h b l c e g i k j d f a")
+    check({"sortedby": price, "collapse": tag}, "h b l c e d")
+    check(
+        {"sortedby": price, "collapse": tag, "collapse_order": rating},
+        "h b l i k d",
+    )
 
 
 def test_collapse_order_nocolumn():
@@ -1576,9 +1579,12 @@ def check(kwargs, target):
     rating = sorting.FieldFacet("rating", reverse=True)
     tag = sorting.FieldFacet("tag")
 
-    check(dict(sortedby=price), "h b l c e g i k j d f a")
-    check(dict(sortedby=price, collapse=tag), "h b l c e d")
-    check(dict(sortedby=price, collapse=tag, collapse_order=rating), "h b l i k d")
+    check({"sortedby": price}, "h b l c e g i k j d f a")
+    check({"sortedby": price, "collapse": tag}, "h b l c e d")
+    check(
+        {"sortedby": price, "collapse": tag, "collapse_order": rating},
+        "h b l i k d",
+    )
 
 
 def test_coord():
diff --git a/tests/test_sorting.py b/tests/test_sorting.py
index 2883defd..4196222d 100644
--- a/tests/test_sorting.py
+++ b/tests/test_sorting.py
@@ -1104,10 +1104,7 @@ def test_compound_sort():
     ]
 
     r = s.search(q, sortedby=sortedby)
-    output = []
-    for hit in r:
-        output.append(" ".join((hit["a"], hit["b"], hit["c"])))
-
+    output = [" ".join((hit["a"], hit["b"], hit["c"])) for hit in r]
     assert output == [
         "alfa charlie charlie",
         "alfa charlie india",
diff --git a/tests/test_writing.py b/tests/test_writing.py
index ff713082..f3244159 100644
--- a/tests/test_writing.py
+++ b/tests/test_writing.py
@@ -162,7 +162,7 @@ def test_buffered_update():
     w = writing.BufferedWriter(ix, period=None, limit=5)
     for i in range(10):
         for char in "abc":
-            fs = dict(id=char, payload=text_type(i) + char)
+            fs = {"id": char, "payload": text_type(i) + char}
             w.update_document(**fs)
 
     with w.reader() as r: