From 80980366434e701bc8c0524600bd595e6704b51f Mon Sep 17 00:00:00 2001
From: Golub-Sergey <1.golub.sergey.1@gmail.com>
Date: Thu, 24 Sep 2020 19:12:49 +0300
Subject: [PATCH] OeX_ES-28: resolves pr issues

---
 .travis.yml                        |  2 +-
 Makefile                           |  4 ++--
 README.md                          |  2 +-
 search/api.py                      |  9 ++++-----
 search/elastic.py                  | 12 ++++++------
 search/search_engine_base.py       |  2 +-
 search/tests/mock_search_engine.py | 10 +++++-----
 search/tests/test_engines.py       |  4 ++--
 search/tests/tests.py              | 10 +++++-----
 9 files changed, 27 insertions(+), 28 deletions(-)

diff --git a/.travis.yml b/.travis.yml
index b6689854..7fce146f 100644
--- a/.travis.yml
+++ b/.travis.yml
@@ -16,7 +16,7 @@ cache:
   - directories:
     - $HOME/.cache/pip
 
-before_install: make test.run_elasticsearch
+before_install: make test.start_elasticsearch
 
 install:
   - pip install -r requirements/travis.txt
diff --git a/Makefile b/Makefile
index dc39124d..a84f1651 100644
--- a/Makefile
+++ b/Makefile
@@ -23,13 +23,13 @@ requirements:
 validate: clean
 	tox
 
-test.run_elasticsearch:
+test.start_elasticsearch:
 	docker-compose up -d
 
 test.stop_elasticsearch:
 	docker-compose stop
 
-test_with_es: clean test.run_elasticsearch
+test_with_es: clean test.start_elasticsearch
 	coverage run --source='.' manage.py test
 	make test.stop_elasticsearch
 
diff --git a/README.md b/README.md
index e198940c..9dccb655 100644
--- a/README.md
+++ b/README.md
@@ -260,7 +260,7 @@ make test_with_es
 ```
 To simply run the container without starting the tests, run:
 ```
-make test.run_elasticsearch
+make test.start_elasticsearch
 ```
 To stop an Elasticsearch Docker container, run:
 ```
diff --git a/search/api.py b/search/api.py
index e311896a..81c26090 100644
--- a/search/api.py
+++ b/search/api.py
@@ -52,10 +52,9 @@ def perform_search(
     """
     # field_, filter_ and exclude_dictionary(s) can be overridden by calling application
     # field_dictionary includes course if course_id provided
-    field_dictionary, filter_dictionary, exclude_dictionary = (
-        SearchFilterGenerator.generate_field_filters(
-            user=user, course_id=course_id
-        )
+    (field_dictionary, filter_dictionary, exclude_dictionary) = SearchFilterGenerator.generate_field_filters(
+        user=user,
+        course_id=course_id
     )
 
     searcher = SearchEngine.get_search_engine(
@@ -115,7 +114,7 @@ def course_discovery_search(search_term=None, size=20, from_=0, field_dictionary
         # show if no enrollment end is provided and has not yet been reached
         filter_dictionary={"enrollment_end": DateRange(datetime.utcnow(), None)},
         exclude_dictionary=exclude_dictionary,
-        agg_terms=course_discovery_aggregations(),
+        aggregation_terms=course_discovery_aggregations(),
     )
 
     return results
diff --git a/search/elastic.py b/search/elastic.py
index b0d315d0..87b4e474 100644
--- a/search/elastic.py
+++ b/search/elastic.py
@@ -218,12 +218,12 @@ def _get_total_doc_key(bucket_name):
     return "total_{}_docs".format(bucket_name)
 
 
-def _process_aggregation_terms(agg_terms):
+def _process_aggregation_terms(aggregation_terms):
     """
     We have a list of terms with which we return aggregated result.
     """
     elastic_aggs = {}
-    for bucket, options in agg_terms.items():
+    for bucket, options in aggregation_terms.items():
         agg_term = {agg_option: options[agg_option] for agg_option in options}
         agg_term["field"] = bucket
         elastic_aggs[bucket] = {
@@ -461,7 +461,7 @@ def search(self,
                field_dictionary=None,
                filter_dictionary=None,
                exclude_dictionary=None,
-               agg_terms=None,
+               aggregation_terms=None,
                exclude_ids=None,
                use_field_match=False,
                **kwargs):  # pylint: disable=arguments-differ, unused-argument
@@ -485,7 +485,7 @@ def search(self,
             documents which have any of these fields and for which the value matches
             one of the specified values shall be filtered out of the result set
 
-            agg_terms (dict): dictionary of terms to include within search
+            aggregation_terms (dict): dictionary of terms to include within search
                 aggregation list - key is the term desired to aggregate upon, and the value is a
                 dictionary of extended information to include. Supported right now is a size
                 specification for a cap upon how many aggregation results to return (can
@@ -638,8 +638,8 @@ def search(self,
         }
 
         body = {"query": query}
-        if agg_terms:
-            body["aggs"] = _process_aggregation_terms(agg_terms)
+        if aggregation_terms:
+            body["aggs"] = _process_aggregation_terms(aggregation_terms)
 
         try:
             es_response = self._es.search(index=self.index_name, body=body, **kwargs)
diff --git a/search/search_engine_base.py b/search/search_engine_base.py
index 9e67523a..a1f34985 100644
--- a/search/search_engine_base.py
+++ b/search/search_engine_base.py
@@ -34,7 +34,7 @@ def search(self,
                field_dictionary=None,
                filter_dictionary=None,
                exclude_dictionary=None,
-               agg_terms=None,
+               aggregation_terms=None,
                **kwargs):  # pylint: disable=too-many-arguments
         """
         Search for matching documents within the search index.
diff --git a/search/tests/mock_search_engine.py b/search/tests/mock_search_engine.py
index b906a5d6..1711563b 100644
--- a/search/tests/mock_search_engine.py
+++ b/search/tests/mock_search_engine.py
@@ -150,7 +150,7 @@ def _process_exclude_dictionary(documents_to_search, exclude_dictionary):
     return documents_to_search
 
 
-def _count_aggregated_values(documents, agg_terms):
+def _count_aggregated_values(documents, aggregation_terms):
     """
     Calculate the counts for the aggregations provided:
 
@@ -194,7 +194,7 @@ def add_agg_value(agg_value):
 
         return total, terms
 
-    for agg in agg_terms:
+    for agg in aggregation_terms:
         total, terms = process_aggregation(agg)
         aggregations[agg] = {
             "total": total,
@@ -339,7 +339,7 @@ def search(self,
                field_dictionary=None,
                filter_dictionary=None,
                exclude_dictionary=None,
-               agg_terms=None,
+               aggregation_terms=None,
                **kwargs):  # pylint: disable=too-many-arguments
         """
         Perform search upon documents within index.
@@ -412,7 +412,7 @@ def score_documents(documents_to_search):
             "results": results
         }
 
-        if agg_terms:
-            response["aggs"] = _count_aggregated_values(documents_to_search, agg_terms)
+        if aggregation_terms:
+            response["aggs"] = _count_aggregated_values(documents_to_search, aggregation_terms)
 
         return response
diff --git a/search/tests/test_engines.py b/search/tests/test_engines.py
index bf0f1589..9bad1373 100644
--- a/search/tests/test_engines.py
+++ b/search/tests/test_engines.py
@@ -56,11 +56,11 @@ def test_aggregation_options(self):
         self.assertEqual(response["total"], 7)
         self.assertNotIn("aggs", response)
 
-        agg_terms = {
+        aggregation_terms = {
             "subject": {"size": 2},
             "org": {"size": 2}
         }
-        response = self.searcher.search(agg_terms=agg_terms)
+        response = self.searcher.search(aggregation_terms=aggregation_terms)
         self.assertEqual(response["total"], 7)
         self.assertIn("aggs", response)
         aggregation_results = response["aggs"]
diff --git a/search/tests/tests.py b/search/tests/tests.py
index dacad493..aaed817b 100644
--- a/search/tests/tests.py
+++ b/search/tests/tests.py
@@ -783,11 +783,11 @@ def test_aggregation_search(self):
         self.assertEqual(response["total"], 7)
         self.assertNotIn("aggs", response)
 
-        agg_terms = {
+        aggregation_terms = {
             "subject": {},
             "org": {}
         }
-        response = self.searcher.search(agg_terms=agg_terms)
+        response = self.searcher.search(aggregation_terms=aggregation_terms)
         self.assertEqual(response["total"], 7)
         self.assertIn("aggs", response)
         aggregation_results = response["aggs"]
@@ -808,13 +808,13 @@ def test_filtered_aggregation_search(self):
         """ Test that aggregation works well alongside filtered results """
         self._index_for_aggs()
 
-        agg_terms = {
+        aggregation_terms = {
             "subject": {},
             "org": {}
         }
         response = self.searcher.search(
             field_dictionary={"org": "Harvard"},
-            agg_terms=agg_terms
+            aggregation_terms=aggregation_terms
         )
         self.assertEqual(response["total"], 4)
         self.assertIn("aggs", response)
@@ -834,7 +834,7 @@ def test_filtered_aggregation_search(self):
 
         response = self.searcher.search(
             field_dictionary={"subject": ["physics", "history"]},
-            agg_terms=agg_terms
+            aggregation_terms=aggregation_terms
        )
         self.assertEqual(response["total"], 3)
         self.assertIn("aggs", response)
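
For reviewers who want to try the renamed keyword argument, the following is a minimal usage sketch, not part of the patch. It assumes the `SearchEngine.get_search_engine` helper visible in the `search/api.py` hunk above, a hypothetical index name `"courseware_content"`, and documents that already carry `subject` and `org` fields as in the tests; the shape of the response (`total`, `aggs`) follows the updated tests in `search/tests/tests.py`.

```python
# Hypothetical sketch of calling search() with the renamed aggregation_terms
# argument. Assumes a search engine is configured for the project and that
# documents with "subject" and "org" fields are already indexed.
from search.search_engine_base import SearchEngine

# "courseware_content" is a placeholder index name, not taken from the patch.
searcher = SearchEngine.get_search_engine(index="courseware_content")

# Each key is a field to aggregate on; "size" caps how many buckets are
# returned, per the aggregation_terms docstring updated in search/elastic.py.
aggregation_terms = {
    "subject": {"size": 2},
    "org": {"size": 2},
}

response = searcher.search(aggregation_terms=aggregation_terms)
print(response["total"])            # total number of matching documents
print(response["aggs"]["subject"])  # bucket counts and totals for "subject"
```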