From 4766bc32802cc553d25703656efc6696ebf85da2 Mon Sep 17 00:00:00 2001
From: Soban Javed
Date: Thu, 23 Apr 2020 13:36:46 +0500
Subject: [PATCH] Add Python 3.8 to the test matrix

Upgrade pylint and fix quality violations
Remove six.PY2 checks to fix coverage
Pin ddt below 1.4.0
---
 .travis.yml | 18 +-----
 codecov.yml | 3 +-
 requirements/base.txt | 11 ++--
 requirements/constraints.txt | 10 +++-
 requirements/dev.txt | 65 ++++++++++-----------
 requirements/pip-tools.txt | 9 ++-
 requirements/quality.txt | 34 +++++------
 requirements/testing.txt | 14 ++---
 requirements/travis.txt | 34 +++++------
 search/api.py | 2 -
 search/elastic.py | 12 ++--
 search/filter_generator.py | 2 +-
 search/initializer.py | 3 +-
 search/result_processor.py | 15 +----
 search/search_engine_base.py | 2 +-
 search/tests/mock_search_engine.py | 13 ++---
 search/tests/test_course_discovery.py | 2 +-
 search/tests/test_course_discovery_views.py | 28 ++++-----
 search/tests/test_views.py | 14 ++---
 search/tests/utils.py | 2 +-
 search/utils.py | 2 +-
 setup.py | 8 +--
 tox.ini | 6 +-
 23 files changed, 132 insertions(+), 177 deletions(-)

diff --git a/.travis.yml b/.travis.yml
index 8dcd704a..6e156730 100644
--- a/.travis.yml
+++ b/.travis.yml
@@ -4,25 +4,9 @@ python:
 - 3.5
 
 envs:
-- TOXENV=django111
-- TOXENV=django20
-- TOXENV=django21
 - TOXENV=django22
 - TOXENV=quality
 
-matrix:
-  include:
-    - python: 2.7
-      env: TOXENV=quality
-    - python: 2.7
-      env: TOXENV=django111
-    - python: 3.6
-      env: TOXENV=django22
-    - python: 3.7
-      env: TOXENV=django22
-    - python: 3.8
-      env: TOXENV=django22
-
 addons:
   apt:
     packages:
@@ -56,4 +40,4 @@ deploy:
   on:
     tags: true
     python: 3.5
-    condition: '$TOXENV = django111'
+    condition: '$TOXENV = django22'
diff --git a/codecov.yml b/codecov.yml
index 4da47686..3328a919 100644
--- a/codecov.yml
+++ b/codecov.yml
@@ -4,9 +4,10 @@ coverage:
       default:
         enabled: yes
         target: auto
+        threshold: 0.5
     patch:
       default:
         enabled: yes
-        target: 100%
+        target: 97%
 
 comment: false
diff --git a/requirements/base.txt b/requirements/base.txt
index 87ee7360..7d0f678a 100644
--- a/requirements/base.txt
+++ b/requirements/base.txt
@@ -4,10 +4,11 @@
 #
 #    make upgrade
 #
-django==1.11.29  # via -r requirements/base.in, event-tracking
+django==2.2.13  # via -r requirements/base.in, event-tracking
 elasticsearch==1.9.0  # via -r requirements/base.in
-event-tracking==0.3.0  # via -r requirements/base.in
+event-tracking==0.3.2  # via -r requirements/base.in
 pymongo==3.10.1  # via event-tracking
-pytz==2019.3  # via django, event-tracking
-six==1.14.0  # via -r requirements/base.in, event-tracking
-urllib3==1.25.8  # via elasticsearch
+pytz==2020.1  # via django, event-tracking
+six==1.15.0  # via -r requirements/base.in, event-tracking
+sqlparse==0.3.1  # via django
+urllib3==1.25.9  # via elasticsearch
diff --git a/requirements/constraints.txt b/requirements/constraints.txt
index 1b740823..4ea178f0 100644
--- a/requirements/constraints.txt
+++ b/requirements/constraints.txt
@@ -8,5 +8,11 @@
 # pin when possible. Writing an issue against the offending project and
 # linking to it here is good.
 
-# This packages is a backport which can only be installed on Python 2.7
-futures ; python_version == "2.7"
\ No newline at end of file
+# ddt >= 1.4.0 is causing test failures
+ddt < 1.4.0
+
+# a newer version of tox-battery is causing failures
+tox-battery==0.5.2
+
+
+coverage<5.1
\ No newline at end of file
diff --git a/requirements/dev.txt b/requirements/dev.txt
index 2aa0fab5..8333c65e 100644
--- a/requirements/dev.txt
+++ b/requirements/dev.txt
@@ -4,56 +4,51 @@
 #
 #    make upgrade
 #
-appdirs==1.4.3  # via -r requirements/travis.txt, virtualenv
-astroid==1.6.6  # via -r requirements/quality.txt, pylint, pylint-celery
-backports.functools-lru-cache==1.6.1  # via -r requirements/quality.txt, astroid, isort, pylint
-certifi==2019.11.28  # via -r requirements/travis.txt, requests
+appdirs==1.4.4  # via -r requirements/travis.txt, virtualenv
+astroid==2.3.3  # via -r requirements/quality.txt, pylint, pylint-celery
+certifi==2020.4.5.2  # via -r requirements/travis.txt, requests
 chardet==3.0.4  # via -r requirements/travis.txt, requests
 click-log==0.3.2  # via -r requirements/quality.txt, edx-lint
-click==7.1.1  # via -r requirements/pip-tools.txt, -r requirements/quality.txt, click-log, edx-lint, pip-tools
-codecov==2.0.22  # via -r requirements/travis.txt
-configparser==4.0.2  # via -r requirements/quality.txt, -r requirements/travis.txt, importlib-metadata, pylint
-contextlib2==0.6.0.post1  # via -r requirements/travis.txt, importlib-metadata, importlib-resources, virtualenv, zipp
-coverage==5.0.4  # via -r requirements/quality.txt, -r requirements/testing.txt, -r requirements/travis.txt, codecov
-ddt==1.3.1  # via -r requirements/quality.txt, -r requirements/testing.txt
+click==7.1.2  # via -r requirements/pip-tools.txt, -r requirements/quality.txt, click-log, edx-lint, pip-tools
+codecov==2.1.4  # via -r requirements/travis.txt
+coverage==5.0.4  # via -c requirements/constraints.txt, -r requirements/quality.txt, -r requirements/testing.txt, -r requirements/travis.txt, codecov
+ddt==1.3.1  # via -c requirements/constraints.txt, -r requirements/quality.txt, -r requirements/testing.txt
 distlib==0.3.0  # via -r requirements/travis.txt, virtualenv
-django==1.11.29  # via -r requirements/quality.txt, -r requirements/testing.txt, event-tracking
+django==2.2.13  # via -r requirements/quality.txt, -r requirements/testing.txt, event-tracking
 edx-lint==1.4.1  # via -r requirements/quality.txt
 elasticsearch==1.9.0  # via -r requirements/quality.txt, -r requirements/testing.txt
-enum34==1.1.10  # via -r requirements/quality.txt, astroid
-event-tracking==0.3.0  # via -r requirements/quality.txt, -r requirements/testing.txt
+event-tracking==0.3.2  # via -r requirements/quality.txt, -r requirements/testing.txt
 filelock==3.0.12  # via -r requirements/travis.txt, tox, virtualenv
-funcsigs==1.0.2  # via -r requirements/quality.txt, -r requirements/testing.txt, mock
-futures==3.3.0 ; python_version == "2.7"  # via -c requirements/constraints.txt, -r requirements/quality.txt, isort
 idna==2.9  # via -r requirements/travis.txt, requests
-importlib-metadata==1.6.0  # via -r requirements/travis.txt, importlib-resources, pluggy, tox, virtualenv
-importlib-resources==1.4.0  # via -r requirements/travis.txt, virtualenv
+importlib-metadata==1.6.1  # via -r requirements/travis.txt, importlib-resources, pluggy, tox, virtualenv
+importlib-resources==1.5.0  # via -r requirements/travis.txt, virtualenv
 isort==4.3.21  # via -r requirements/quality.txt, pylint
 lazy-object-proxy==1.4.3  # via -r requirements/quality.txt, astroid
 mccabe==0.6.1  # via -r requirements/quality.txt, pylint
 mock==3.0.5  # via -r requirements/quality.txt, -r requirements/testing.txt
-packaging==20.3  # via -r requirements/travis.txt, tox
-pathlib2==2.3.5  # via -r requirements/travis.txt, importlib-metadata, importlib-resources, virtualenv
-pip-tools==4.5.1  # via -r requirements/pip-tools.txt
+packaging==20.4  # via -r requirements/travis.txt, tox
+pip-tools==5.2.1  # via -r requirements/pip-tools.txt
 pluggy==0.13.1  # via -r requirements/travis.txt, tox
 py==1.8.1  # via -r requirements/travis.txt, tox
-pycodestyle==2.5.0  # via -r requirements/quality.txt
+pycodestyle==2.6.0  # via -r requirements/quality.txt
 pylint-celery==0.3  # via -r requirements/quality.txt, edx-lint
-pylint-django==0.11.1  # via -r requirements/quality.txt, edx-lint
+pylint-django==2.0.11  # via -r requirements/quality.txt, edx-lint
 pylint-plugin-utils==0.6  # via -r requirements/quality.txt, pylint-celery, pylint-django
-pylint==1.9.5  # via -r requirements/quality.txt, edx-lint, pylint-celery, pylint-django, pylint-plugin-utils
+pylint==2.4.2  # via -r requirements/quality.txt, edx-lint, pylint-celery, pylint-django, pylint-plugin-utils
 pymongo==3.10.1  # via -r requirements/quality.txt, -r requirements/testing.txt, event-tracking
-pyparsing==2.4.6  # via -r requirements/travis.txt, packaging
-pytz==2019.3  # via -r requirements/quality.txt, -r requirements/testing.txt, django, event-tracking
+pyparsing==2.4.7  # via -r requirements/travis.txt, packaging
+pytz==2020.1  # via -r requirements/quality.txt, -r requirements/testing.txt, django, event-tracking
 requests==2.23.0  # via -r requirements/travis.txt, codecov
-scandir==1.10.0  # via -r requirements/travis.txt, pathlib2
-singledispatch==3.4.0.3  # via -r requirements/quality.txt, -r requirements/travis.txt, astroid, importlib-resources, pylint
-six==1.14.0  # via -r requirements/pip-tools.txt, -r requirements/quality.txt, -r requirements/testing.txt, -r requirements/travis.txt, astroid, edx-lint, event-tracking, mock, packaging, pathlib2, pip-tools, pylint, singledispatch, tox, virtualenv
-toml==0.10.0  # via -r requirements/travis.txt, tox
-tox-battery==0.5.2  # via -r requirements/travis.txt
-tox==3.14.6  # via -r requirements/travis.txt, tox-battery
-typing==3.7.4.1  # via -r requirements/travis.txt, importlib-resources
-urllib3==1.25.8  # via -r requirements/quality.txt, -r requirements/testing.txt, -r requirements/travis.txt, elasticsearch, requests
-virtualenv==20.0.15  # via -r requirements/travis.txt, tox
-wrapt==1.12.1  # via -r requirements/quality.txt, astroid
+six==1.15.0  # via -r requirements/pip-tools.txt, -r requirements/quality.txt, -r requirements/testing.txt, -r requirements/travis.txt, astroid, edx-lint, event-tracking, mock, packaging, pip-tools, tox, virtualenv
+sqlparse==0.3.1  # via -r requirements/quality.txt, -r requirements/testing.txt, django
+toml==0.10.1  # via -r requirements/travis.txt, tox
+tox-battery==0.5.2  # via -c requirements/constraints.txt, -r requirements/travis.txt
+tox==3.15.2  # via -r requirements/travis.txt, tox-battery
+typed-ast==1.4.1  # via -r requirements/quality.txt, astroid
+urllib3==1.25.9  # via -r requirements/quality.txt, -r requirements/testing.txt, -r requirements/travis.txt, elasticsearch, requests
+virtualenv==20.0.21  # via -r requirements/travis.txt, tox
+wrapt==1.11.2  # via -r requirements/quality.txt, astroid
 zipp==1.2.0  # via -r requirements/travis.txt, importlib-metadata, importlib-resources
+
+# The following packages are considered to be unsafe in a requirements file:
+# pip
diff --git a/requirements/pip-tools.txt b/requirements/pip-tools.txt
index a99fe207..1ae2c19f 100644
--- a/requirements/pip-tools.txt
+++ b/requirements/pip-tools.txt
@@ -4,6 +4,9 @@
 #
 #    make upgrade
 #
-click==7.1.1  # via pip-tools
-pip-tools==4.5.1  # via -r requirements/pip-tools.in
-six==1.14.0  # via pip-tools
+click==7.1.2  # via pip-tools
+pip-tools==5.2.1  # via -r requirements/pip-tools.in
+six==1.15.0  # via pip-tools
+
+# The following packages are considered to be unsafe in a requirements file:
+# pip
diff --git a/requirements/quality.txt b/requirements/quality.txt
index 6b48ea19..98d2a712 100644
--- a/requirements/quality.txt
+++ b/requirements/quality.txt
@@ -4,32 +4,28 @@
 #
 #    make upgrade
 #
-astroid==1.6.6  # via pylint, pylint-celery
-backports.functools-lru-cache==1.6.1  # via astroid, isort, pylint
+astroid==2.3.3  # via pylint, pylint-celery
 click-log==0.3.2  # via edx-lint
-click==7.1.1  # via click-log, edx-lint
-configparser==4.0.2  # via pylint
-coverage==5.0.4  # via -r requirements/quality.in, -r requirements/testing.txt
-ddt==1.3.1  # via -r requirements/testing.txt
-django==1.11.29  # via -r requirements/testing.txt, event-tracking
+click==7.1.2  # via click-log, edx-lint
+coverage==5.0.4  # via -c requirements/constraints.txt, -r requirements/quality.in, -r requirements/testing.txt
+ddt==1.3.1  # via -c requirements/constraints.txt, -r requirements/testing.txt
+django==2.2.13  # via -r requirements/testing.txt, event-tracking
 edx-lint==1.4.1  # via -r requirements/quality.in
 elasticsearch==1.9.0  # via -r requirements/testing.txt
-enum34==1.1.10  # via astroid
-event-tracking==0.3.0  # via -r requirements/testing.txt
-funcsigs==1.0.2  # via -r requirements/testing.txt, mock
-futures==3.3.0 ; python_version == "2.7"  # via -c requirements/constraints.txt, isort
+event-tracking==0.3.2  # via -r requirements/testing.txt
 isort==4.3.21  # via pylint
 lazy-object-proxy==1.4.3  # via astroid
 mccabe==0.6.1  # via pylint
 mock==3.0.5  # via -r requirements/testing.txt
-pycodestyle==2.5.0  # via -r requirements/quality.in
+pycodestyle==2.6.0  # via -r requirements/quality.in
 pylint-celery==0.3  # via edx-lint
-pylint-django==0.11.1  # via edx-lint
+pylint-django==2.0.11  # via edx-lint
 pylint-plugin-utils==0.6  # via pylint-celery, pylint-django
-pylint==1.9.5  # via edx-lint, pylint-celery, pylint-django, pylint-plugin-utils
+pylint==2.4.2  # via edx-lint, pylint-celery, pylint-django, pylint-plugin-utils
 pymongo==3.10.1  # via -r requirements/testing.txt, event-tracking
-pytz==2019.3  # via -r requirements/testing.txt, django, event-tracking
-singledispatch==3.4.0.3  # via astroid, pylint
-six==1.14.0  # via -r requirements/testing.txt, astroid, edx-lint, event-tracking, mock, pylint, singledispatch
-urllib3==1.25.8  # via -r requirements/testing.txt, elasticsearch
-wrapt==1.12.1  # via astroid
+pytz==2020.1  # via -r requirements/testing.txt, django, event-tracking
+six==1.15.0  # via -r requirements/testing.txt, astroid, edx-lint, event-tracking, mock
+sqlparse==0.3.1  # via -r requirements/testing.txt, django
+typed-ast==1.4.1  # via astroid
+urllib3==1.25.9  # via -r requirements/testing.txt, elasticsearch
+wrapt==1.11.2  # via astroid
diff --git a/requirements/testing.txt b/requirements/testing.txt
index 1d02fb57..7fc4a946 100644
--- a/requirements/testing.txt
+++ b/requirements/testing.txt
@@ -4,13 +4,13 @@
 #
 #    make upgrade
 #
-coverage==5.0.4  # via -r requirements/testing.in
-ddt==1.3.1  # via -r requirements/testing.in
+coverage==5.0.4  # via -c requirements/constraints.txt, -r requirements/testing.in
+ddt==1.3.1  # via -c requirements/constraints.txt, -r requirements/testing.in
 elasticsearch==1.9.0  # via -r requirements/base.txt
-event-tracking==0.3.0  # via -r requirements/base.txt
-funcsigs==1.0.2  # via mock
+event-tracking==0.3.2  # via -r requirements/base.txt
 mock==3.0.5  # via -r requirements/testing.in
 pymongo==3.10.1  # via -r requirements/base.txt, event-tracking
-pytz==2019.3  # via -r requirements/base.txt, django, event-tracking
-six==1.14.0  # via -r requirements/base.txt, event-tracking, mock
-urllib3==1.25.8  # via -r requirements/base.txt, elasticsearch
+pytz==2020.1  # via -r requirements/base.txt, django, event-tracking
+six==1.15.0  # via -r requirements/base.txt, event-tracking, mock
+sqlparse==0.3.1  # via -r requirements/base.txt, django
+urllib3==1.25.9  # via -r requirements/base.txt, elasticsearch
diff --git a/requirements/travis.txt b/requirements/travis.txt
index 99c25b8a..12567e6f 100644
--- a/requirements/travis.txt
+++ b/requirements/travis.txt
@@ -4,31 +4,25 @@
 #
 #    make upgrade
 #
-appdirs==1.4.3  # via virtualenv
-certifi==2019.11.28  # via requests
+appdirs==1.4.4  # via virtualenv
+certifi==2020.4.5.2  # via requests
 chardet==3.0.4  # via requests
-codecov==2.0.22  # via -r requirements/travis.in
-configparser==4.0.2  # via importlib-metadata
-contextlib2==0.6.0.post1  # via importlib-metadata, importlib-resources, virtualenv, zipp
-coverage==5.0.4  # via codecov
+codecov==2.1.4  # via -r requirements/travis.in
+coverage==5.0.4  # via -c requirements/constraints.txt, codecov
 distlib==0.3.0  # via virtualenv
 filelock==3.0.12  # via tox, virtualenv
 idna==2.9  # via requests
-importlib-metadata==1.6.0  # via importlib-resources, pluggy, tox, virtualenv
-importlib-resources==1.4.0  # via virtualenv
-packaging==20.3  # via tox
-pathlib2==2.3.5  # via importlib-metadata, importlib-resources, virtualenv
+importlib-metadata==1.6.1  # via importlib-resources, pluggy, tox, virtualenv
+importlib-resources==1.5.0  # via virtualenv
+packaging==20.4  # via tox
 pluggy==0.13.1  # via tox
 py==1.8.1  # via tox
-pyparsing==2.4.6  # via packaging
+pyparsing==2.4.7  # via packaging
 requests==2.23.0  # via codecov
-scandir==1.10.0  # via pathlib2
-singledispatch==3.4.0.3  # via importlib-resources
-six==1.14.0  # via packaging, pathlib2, tox, virtualenv
-toml==0.10.0  # via tox
-tox-battery==0.5.2  # via -r requirements/travis.in
-tox==3.14.6  # via -r requirements/travis.in, tox-battery
-typing==3.7.4.1  # via importlib-resources
-urllib3==1.25.8  # via requests
-virtualenv==20.0.15  # via tox
+six==1.15.0  # via packaging, tox, virtualenv
+toml==0.10.1  # via tox
+tox-battery==0.5.2  # via -c requirements/constraints.txt, -r requirements/travis.in
+tox==3.15.2  # via -r requirements/travis.in, tox-battery
+urllib3==1.25.9  # via requests
+virtualenv==20.0.21  # via tox
 zipp==1.2.0  # via importlib-metadata, importlib-resources
diff --git a/search/api.py b/search/api.py
index eea1bf34..fdba1f08 100644
--- a/search/api.py
+++ b/search/api.py
@@ -25,7 +25,6 @@ def course_discovery_facets():
 
 class NoSearchEngineError(Exception):
     """ NoSearchEngineError exception to be thrown if no search engine is specified """
-    pass
 
 
 class QueryParseError(Exception):
@@ -36,7 +35,6 @@ class QueryParseError(Exception):
 
     error message to the user.
""" - pass def perform_search( diff --git a/search/elastic.py b/search/elastic.py index 147e1419..e41781e8 100644 --- a/search/elastic.py +++ b/search/elastic.py @@ -538,10 +538,7 @@ def search(self, # We have a query string, search all fields for matching text within the "content" node if query_string: - if six.PY2: - query_string = query_string.encode('utf-8').translate(None, RESERVED_CHARACTERS) - else: - query_string = query_string.translate(query_string.maketrans('', '', RESERVED_CHARACTERS)) + query_string = query_string.translate(query_string.maketrans('', '', RESERVED_CHARACTERS)) elastic_queries.append({ "query_string": { "fields": ["content.*"], @@ -610,9 +607,8 @@ def search(self, if 'QueryParsingException' in message: log.exception("Malformed search query: %s", message) # lint-amnesty, pylint: disable=unicode-format-string raise QueryParseError('Malformed search query.') - else: - # log information and re-raise - log.exception("error while searching index - %s", str(message)) # lint-amnesty, pylint: disable=unicode-format-string - raise + # log information and re-raise + log.exception("error while searching index - %s", str(message)) # lint-amnesty, pylint: disable=unicode-format-string + raise return _translate_hits(es_response) diff --git a/search/filter_generator.py b/search/filter_generator.py index c58f11af..bceee778 100644 --- a/search/filter_generator.py +++ b/search/filter_generator.py @@ -7,7 +7,7 @@ from .utils import _load_class, DateRange -class SearchFilterGenerator(object): +class SearchFilterGenerator: """ Class to provide a set of filters for the search. diff --git a/search/initializer.py b/search/initializer.py index 20370fae..39874b22 100644 --- a/search/initializer.py +++ b/search/initializer.py @@ -6,7 +6,7 @@ from .utils import _load_class -class SearchInitializer(object): +class SearchInitializer: """ Class to set starting environment parameters for search app. @@ -17,7 +17,6 @@ class SearchInitializer(object): # pylint: disable=unused-argument, no-self-use def initialize(self, **kwargs): """ empty base implementation """ - pass @classmethod def set_search_enviroment(cls, **kwargs): diff --git a/search/result_processor.py b/search/result_processor.py index 076b21b6..b8a755b9 100644 --- a/search/result_processor.py +++ b/search/result_processor.py @@ -21,7 +21,7 @@ log = logging.getLogger(__name__) # pylint: disable=invalid-name -class SearchResultProcessor(object): +class SearchResultProcessor: """ Class to post-process a search result from the search. 
@@ -81,7 +81,7 @@ def has_match(string):
         def decorate_matches(match_in, match_word):
             """ decorate the matches within the excerpt """
             matches = re.finditer(match_word, match_in, re.IGNORECASE)
-            for matched_string in set([match.group() for match in matches]):
+            for matched_string in {match.group() for match in matches}:
                 match_in = match_in.replace(
                     matched_string,
                     getattr(settings, "SEARCH_MATCH_DECORATION", u"{}").format(matched_string)
@@ -132,16 +132,7 @@ def excerpt(self):
             return None
 
         match_phrases = [self._match_phrase]
-        if six.PY2:
-            separate_phrases = [
-                phrase.decode('utf-8')
-                for phrase in shlex.split(self._match_phrase.encode('utf-8'))
-            ]
-        else:
-            separate_phrases = [
-                phrase
-                for phrase in shlex.split(self._match_phrase)
-            ]
+        separate_phrases = list(shlex.split(self._match_phrase))
         if len(separate_phrases) > 1:
             match_phrases.extend(separate_phrases)
         else:
diff --git a/search/search_engine_base.py b/search/search_engine_base.py
index 640786d4..9f09c6b4 100644
--- a/search/search_engine_base.py
+++ b/search/search_engine_base.py
@@ -6,7 +6,7 @@
 from .utils import _load_class
 
 
-class SearchEngine(object):
+class SearchEngine:
     """
     Base abstract SearchEngine object
     """
diff --git a/search/tests/mock_search_engine.py b/search/tests/mock_search_engine.py
index bd5581e1..0a436eb2 100644
--- a/search/tests/mock_search_engine.py
+++ b/search/tests/mock_search_engine.py
@@ -91,13 +91,13 @@ def value_matches(doc, field_name, field_value):
             (field_value.lower is None or compare_value >= field_value.lower) and
             (field_value.upper is None or compare_value <= field_value.upper)
         )
-    elif _is_iterable(compare_value) and not _is_iterable(field_value):
+    if _is_iterable(compare_value) and not _is_iterable(field_value):
         return any((item == field_value for item in compare_value))
 
-    elif _is_iterable(field_value) and not _is_iterable(compare_value):
+    if _is_iterable(field_value) and not _is_iterable(compare_value):
         return any((item == compare_value for item in field_value))
 
-    elif _is_iterable(compare_value) and _is_iterable(field_value):
+    if _is_iterable(compare_value) and _is_iterable(field_value):
         return any((six.text_type(item) in field_value for item in compare_value))
 
     return compare_value == field_value
@@ -113,10 +113,7 @@ def _process_query_string(documents_to_search, query_string):
     """ keep the documents that contain at least one of the search strings provided """
     def _encode_string(string):
         """Encode a Unicode string in the same way as the Elasticsearch search engine."""
-        if six.PY2:
-            string = string.encode('utf-8').translate(None, RESERVED_CHARACTERS)
-        else:
-            string = string.translate(string.maketrans('', '', RESERVED_CHARACTERS))
+        string = string.translate(string.maketrans('', '', RESERVED_CHARACTERS))
         return string
 
     def has_string(dictionary_object, search_string):
@@ -124,7 +121,7 @@ def has_string(dictionary_object, search_string):
         for name in dictionary_object:
             if isinstance(dictionary_object[name], dict):
                 return has_string(dictionary_object[name], search_string)
-            elif dictionary_object[name]:
+            if dictionary_object[name]:
                 if search_string.lower() in _encode_string(dictionary_object[name].lower()):
                     return True
         return False
diff --git a/search/tests/test_course_discovery.py b/search/tests/test_course_discovery.py
index f0197fbc..1683ab5f 100644
--- a/search/tests/test_course_discovery.py
+++ b/search/tests/test_course_discovery.py
@@ -21,7 +21,7 @@
 from .mock_search_engine import MockSearchEngine
 
 
-class DemoCourse(object):
+class DemoCourse:
     """ Class for dispensing demo courses """
courses """ DEMO_COURSE_ID = "edX/DemoX/Demo_Course" DEMO_COURSE = { diff --git a/search/tests/test_course_discovery_views.py b/search/tests/test_course_discovery_views.py index a2bca23e..4e498316 100644 --- a/search/tests/test_course_discovery_views.py +++ b/search/tests/test_course_discovery_views.py @@ -39,58 +39,58 @@ def setUp(self): def test_search_from_url(self): """ test searching using the url """ code, results = post_discovery_request({}) - self.assertTrue(code < 300 and code > 199) + self.assertTrue(199 < code < 300) self.assertEqual(results["total"], 3) code, results = post_discovery_request({"search_string": "right"}) - self.assertTrue(code < 300 and code > 199) + self.assertTrue(199 < code < 300) self.assertEqual(results["total"], 1) code, results = post_discovery_request({"search_string": "parameter"}) - self.assertTrue(code < 300 and code > 199) + self.assertTrue(199 < code < 300) self.assertEqual(results["total"], 2) code, results = post_discovery_request({"search_string": "Find this one"}) - self.assertTrue(code < 300 and code > 199) + self.assertTrue(199 < code < 300) self.assertEqual(results["total"], 3) def test_pagination(self): """ test that paging attributes are correctly applied """ code, results = post_discovery_request({"search_string": "Find this one"}) - self.assertTrue(code < 300 and code > 199) + self.assertTrue(199 < code < 300) self.assertEqual(results["total"], 3) self.assertEqual(len(results["results"]), 3) code, results = post_discovery_request({"search_string": "Find this one", "page_size": 1}) - self.assertTrue(code < 300 and code > 199) + self.assertTrue(199 < code < 300) self.assertEqual(results["total"], 3) self.assertEqual(len(results["results"]), 1) result_ids = [r["data"]["id"] for r in results["results"]] self.assertIn(DemoCourse.DEMO_COURSE_ID + "_1", result_ids) code, results = post_discovery_request({"search_string": "Find this one", "page_size": 1, "page_index": 0}) - self.assertTrue(code < 300 and code > 199) + self.assertTrue(199 < code < 300) self.assertEqual(results["total"], 3) self.assertEqual(len(results["results"]), 1) result_ids = [r["data"]["id"] for r in results["results"]] self.assertIn(DemoCourse.DEMO_COURSE_ID + "_1", result_ids) code, results = post_discovery_request({"search_string": "Find this one", "page_size": 1, "page_index": 1}) - self.assertTrue(code < 300 and code > 199) + self.assertTrue(199 < code < 300) self.assertEqual(results["total"], 3) self.assertEqual(len(results["results"]), 1) result_ids = [r["data"]["id"] for r in results["results"]] self.assertIn(DemoCourse.DEMO_COURSE_ID + "_2", result_ids) code, results = post_discovery_request({"search_string": "Find this one", "page_size": 1, "page_index": 2}) - self.assertTrue(code < 300 and code > 199) + self.assertTrue(199 < code < 300) self.assertEqual(results["total"], 3) self.assertEqual(len(results["results"]), 1) result_ids = [r["data"]["id"] for r in results["results"]] self.assertIn(DemoCourse.DEMO_COURSE_ID + "_3", result_ids) code, results = post_discovery_request({"search_string": "Find this one", "page_size": 2}) - self.assertTrue(code < 300 and code > 199) + self.assertTrue(199 < code < 300) self.assertEqual(results["total"], 3) self.assertEqual(len(results["results"]), 2) result_ids = [r["data"]["id"] for r in results["results"]] @@ -98,7 +98,7 @@ def test_pagination(self): self.assertIn(DemoCourse.DEMO_COURSE_ID + "_2", result_ids) code, results = post_discovery_request({"search_string": "Find this one", "page_size": 2, "page_index": 0}) - 
+        self.assertTrue(199 < code < 300)
         self.assertEqual(results["total"], 3)
         self.assertEqual(len(results["results"]), 2)
         result_ids = [r["data"]["id"] for r in results["results"]]
@@ -106,7 +106,7 @@ def test_pagination(self):
         self.assertIn(DemoCourse.DEMO_COURSE_ID + "_2", result_ids)
 
         code, results = post_discovery_request({"search_string": "Find this one", "page_size": 2, "page_index": 1})
-        self.assertTrue(code < 300 and code > 199)
+        self.assertTrue(199 < code < 300)
         self.assertEqual(results["total"], 3)
         self.assertEqual(len(results["results"]), 1)
         result_ids = [r["data"]["id"] for r in results["results"]]
@@ -115,14 +115,14 @@ def test_field_matching(self):
         """ test that requests can specify field matches """
         code, results = post_discovery_request({"org": "OrgA"})
-        self.assertTrue(code < 300 and code > 199)
+        self.assertTrue(199 < code < 300)
         self.assertEqual(results["total"], 1)
         self.assertEqual(len(results["results"]), 1)
         result_ids = [r["data"]["id"] for r in results["results"]]
         self.assertIn(DemoCourse.DEMO_COURSE_ID + "_1", result_ids)
 
         code, results = post_discovery_request({"org": "OrgB"})
-        self.assertTrue(code < 300 and code > 199)
+        self.assertTrue(199 < code < 300)
         self.assertEqual(results["total"], 1)
         self.assertEqual(len(results["results"]), 1)
         result_ids = [r["data"]["id"] for r in results["results"]]
diff --git a/search/tests/test_views.py b/search/tests/test_views.py
index b7c4f20c..79a29703 100644
--- a/search/tests/test_views.py
+++ b/search/tests/test_views.py
@@ -118,7 +118,7 @@ def test_search_from_url(self):
         self._reset_mocked_tracker()
 
         code, results = post_request({"search_string": "sun"})
-        self.assertTrue(code < 300 and code > 199)
+        self.assertTrue(199 < code < 300)
         self.assertEqual(results["total"], 2)
         result_ids = [r["data"]["id"] for r in results["results"]]
         self.assertTrue("FAKE_ID_3" in result_ids and "FAKE_ID_2" in result_ids)
@@ -128,7 +128,7 @@ def test_search_from_url(self):
         self._reset_mocked_tracker()
 
         code, results = post_request({"search_string": "Darling"})
-        self.assertTrue(code < 300 and code > 199)
+        self.assertTrue(199 < code < 300)
         self.assertEqual(results["total"], 2)
         result_ids = [r["data"]["id"] for r in results["results"]]
         self.assertTrue("FAKE_ID_1" in result_ids and "FAKE_ID_2" in result_ids)
@@ -138,7 +138,7 @@ def test_search_from_url(self):
         self._reset_mocked_tracker()
 
         code, results = post_request({"search_string": "winter"})
-        self.assertTrue(code < 300 and code > 199)
+        self.assertTrue(199 < code < 300)
         self.assertEqual(results["total"], 1)
         result_ids = [r["data"]["id"] for r in results["results"]]
         self.assertTrue("FAKE_ID_1" in result_ids and "FAKE_ID_2" not in result_ids)
@@ -194,7 +194,7 @@ def test_course_search_url(self):
         self._reset_mocked_tracker()
 
         code, results = post_request({"search_string": "Little Darling"})
-        self.assertTrue(code < 300 and code > 199)
+        self.assertTrue(199 < code < 300)
         self.assertEqual(results["total"], 3)
 
         # Test initiate search and return results were called - and clear mocked tracker
@@ -202,7 +202,7 @@ def test_course_search_url(self):
         self._reset_mocked_tracker()
 
         code, results = post_request({"search_string": "Darling"}, "ABC/DEF/GHI")
-        self.assertTrue(code < 300 and code > 199)
+        self.assertTrue(199 < code < 300)
         self.assertEqual(results["total"], 2)
         result_ids = [r["data"]["id"] for r in results["results"]]
         self.assertTrue("FAKE_ID_1" in result_ids and "FAKE_ID_2" in result_ids)
@@ -212,7 +212,7 @@ def test_course_search_url(self):
         self._reset_mocked_tracker()
 
         code, results = post_request({"search_string": "winter"}, "ABC/DEF/GHI")
-        self.assertTrue(code < 300 and code > 199)
+        self.assertTrue(199 < code < 300)
         self.assertEqual(results["total"], 1)
         result_ids = [r["data"]["id"] for r in results["results"]]
         self.assertTrue("FAKE_ID_1" in result_ids and "FAKE_ID_2" not in result_ids and "FAKE_ID_3" not in result_ids)
@@ -222,7 +222,7 @@ def test_course_search_url(self):
         self._reset_mocked_tracker()
 
         code, results = post_request({"search_string": "winter"}, "LMN/OPQ/RST")
-        self.assertTrue(code < 300 and code > 199)
+        self.assertTrue(199 < code < 300)
         self.assertEqual(results["total"], 1)
         result_ids = [r["data"]["id"] for r in results["results"]]
         self.assertTrue("FAKE_ID_1" not in result_ids and "FAKE_ID_2" not in result_ids and "FAKE_ID_3" in result_ids)
diff --git a/search/tests/utils.py b/search/tests/utils.py
index 78ebd77d..10435446 100644
--- a/search/tests/utils.py
+++ b/search/tests/utils.py
@@ -28,7 +28,7 @@ def post_discovery_request(body):
 
 
 # pylint: disable=too-few-public-methods
-class SearcherMixin(object):
+class SearcherMixin:
     """ Mixin to provide searcher for the tests """
     _searcher = None
 
diff --git a/search/utils.py b/search/utils.py
index 6452c074..66e2994b 100644
--- a/search/utils.py
+++ b/search/utils.py
@@ -25,7 +25,7 @@ def _is_iterable(item):
     return isinstance(item, collections.Iterable) and not isinstance(item, six.string_types)
 
 
-class ValueRange(object):
+class ValueRange:
     """
     Object to represent a range of values
     """
diff --git a/setup.py b/setup.py
index c786525f..541dbd41 100755
--- a/setup.py
+++ b/setup.py
@@ -32,7 +32,7 @@ def is_requirement(line):
 
 setup(
     name='edx-search',
-    version='1.3.4',
+    version='1.4.0',
     description='Search and index routines for index access',
     author='edX',
     author_email='oscm@edx.org',
@@ -44,14 +44,10 @@ def is_requirement(line):
         'Intended Audience :: Developers',
         'License :: OSI Approved :: GNU Affero General Public License v3',
         'Operating System :: OS Independent',
-        'Programming Language :: Python :: 2',
-        'Programming Language :: Python :: 2.7',
         'Programming Language :: Python :: 3',
         'Programming Language :: Python :: 3.5',
+        'Programming Language :: Python :: 3.8',
         'Framework :: Django',
-        'Framework :: Django :: 1.11',
-        'Framework :: Django :: 2.0',
-        'Framework :: Django :: 2.1',
         'Framework :: Django :: 2.2',
     ],
     packages=['search', 'search.tests'],
diff --git a/tox.ini b/tox.ini
index 69f9f4e4..d7c083bc 100644
--- a/tox.ini
+++ b/tox.ini
@@ -1,5 +1,5 @@
 [tox]
-envlist = py27-django111,py35-django{111,20,21,22},py{36,37,38}-django22,quality
+envlist = py35-django22,py38-django{22,30},quality
 
 [testenv]
 setenv =
@@ -7,10 +7,8 @@ setenv =
     # This allows us to reference settings.py
     PYTHONPATH = {toxinidir}
 deps =
-    django111: Django>=1.11,<2
-    django20: Django>=2.0,<2.1
-    django21: Django>=2.1,<2.2
     django22: Django>=2.2,<2.3
+    django30: Django>=3.0,<3.1
     -r {toxinidir}/requirements/testing.txt
 commands =
     python -Wd -m coverage run manage.py test --settings=settings {posargs}
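
Note on the six.PY2 removal: the Python-3-only path kept in search/elastic.py and search/tests/mock_search_engine.py strips Elasticsearch reserved characters with str.maketrans/str.translate. A minimal standalone sketch of that idiom; the RESERVED_CHARACTERS value below is illustrative only, not the exact constant defined in elastic.py:

    # Illustrative subset of Elasticsearch query-string reserved characters;
    # the real constant lives in search/elastic.py.
    RESERVED_CHARACTERS = '+-=><!(){}[]^"~*:\\/&|'

    def strip_reserved(query_string):
        """Drop every reserved character, as the Python 3 code path does."""
        return query_string.translate(str.maketrans('', '', RESERVED_CHARACTERS))

    assert strip_reserved('title:"machine learning" AND cs*') == 'titlemachine learning AND cs'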
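
Similarly, the excerpt logic in search/result_processor.py now relies on shlex.split alone to break a match phrase into its separately quoted phrases. A small sketch of the behaviour the simplified code depends on; the sample phrase is made up:

    import shlex

    # A quoted phrase stays together; unquoted words split apart.
    match_phrase = 'little "darling dear" winter'
    separate_phrases = list(shlex.split(match_phrase))
    assert separate_phrases == ['little', 'darling dear', 'winter']

    # The processor highlights the full phrase first, then each piece,
    # mirroring the simplified excerpt() code above.
    match_phrases = [match_phrase]
    if len(separate_phrases) > 1:
        match_phrases.extend(separate_phrases)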