Best Python code snippet using localstack_python
test_search.py
Source: test_search.py
import dataclasses
import time
import fastapi
from datetime import datetime
import pytest
import app.core
from app.api.api_v1.routers import search
from app.api.api_v1.schemas.search import (
    JitQuery,
    SearchRequestBody,
    SortOrder,
    FilterField,
)
from app.core.search import _FILTER_FIELD_MAP, OpenSearchQueryConfig

_TOTAL_DOCUMENT_COUNT = 7


@pytest.mark.search
def test_simple_pagination(test_opensearch, monkeypatch, client, user_token_headers):
    monkeypatch.setattr(search, "_OPENSEARCH_CONNECTION", test_opensearch)
    page1_response = client.post(
        "/api/v1/searches",
        json={
            "query_string": "climate",
            "exact_match": False,
            "limit": 2,
            "offset": 0,
        },
        headers=user_token_headers,
    )
    assert page1_response.status_code == 200
    page1_response_body = page1_response.json()
    page1_documents = page1_response_body["documents"]
    assert len(page1_documents) == 2
    page2_response = client.post(
        "/api/v1/searches",
        json={
            "query_string": "climate",
            "exact_match": False,
            "limit": 2,
            "offset": 2,
        },
        headers=user_token_headers,
    )
    assert page2_response.status_code == 200
    page2_response_body = page2_response.json()
    page2_documents = page2_response_body["documents"]
    assert len(page2_documents) == 2
    # Sanity check that we really do have 4 different documents
    document_names = {d["document_name"] for d in page1_documents} | {
        d["document_name"] for d in page2_documents
    }
    assert len(document_names) == 4
    for d in page1_documents:
        assert d not in page2_documents


@pytest.mark.search
def test_pagination_overlap(test_opensearch, monkeypatch, client, user_token_headers):
    monkeypatch.setattr(search, "_OPENSEARCH_CONNECTION", test_opensearch)
    page1_response = client.post(
        "/api/v1/searches",
        json={
            "query_string": "climate",
            "exact_match": False,
            "limit": 2,
            "offset": 0,
        },
        headers=user_token_headers,
    )
    assert page1_response.status_code == 200
    page1_response_body = page1_response.json()
    page1_documents = page1_response_body["documents"]
    assert len(page1_documents) == 2
    page2_response = client.post(
        "/api/v1/searches",
        json={
            "query_string": "climate",
            "exact_match": False,
            "limit": 2,
            "offset": 1,
        },
        headers=user_token_headers,
    )
    assert page2_response.status_code == 200
    page2_response_body = page2_response.json()
    page2_documents = page2_response_body["documents"]
    assert len(page2_documents) == 2
    # Sanity check that we really do have 3 different documents
    document_names = {d["document_name"] for d in page1_documents} | {
        d["document_name"] for d in page2_documents
    }
    assert len(document_names) == 3
    assert page1_documents[-1] == page2_documents[0]


@pytest.mark.search
def test_search_body_valid(test_opensearch, monkeypatch, client, user_token_headers):
    """Test that a simple, known-valid search responds with success."""
    monkeypatch.setattr(search, "_OPENSEARCH_CONNECTION", test_opensearch)
    response = client.post(
        "/api/v1/searches",
        json={"query_string": "disaster", "exact_match": False},
        headers=user_token_headers,
    )
    assert response.status_code == 200
    response = client.post(
        "/api/v1/searches",
        json={"query_string": "disaster", "exact_match": True},
        headers=user_token_headers,
    )
    assert response.status_code == 200


@pytest.mark.search
def test_jit_query_is_default(
    test_opensearch, monkeypatch, client, user_token_headers, mocker
):
    monkeypatch.setattr(search, "_OPENSEARCH_CONNECTION", test_opensearch)
    jit_query_spy = mocker.spy(app.core.jit_query_wrapper, "jit_query")
    background_task_spy = mocker.spy(fastapi.BackgroundTasks, "add_task")
    response = client.post(
        "/api/v1/searches",
        json={
            "query_string": "climate",
            "exact_match": True,
        },
        headers=user_token_headers,
    )
    assert response.status_code == 200
    # Check the jit query was called by checking the background task has been added
    assert jit_query_spy.call_count == 1 or jit_query_spy.call_count == 2
    assert background_task_spy.call_count == 1


@pytest.mark.search
def test_with_jit(test_opensearch, monkeypatch, client, user_token_headers, mocker):
    monkeypatch.setattr(search, "_OPENSEARCH_CONNECTION", test_opensearch)
    jit_query_spy = mocker.spy(app.core.jit_query_wrapper, "jit_query")
    background_task_spy = mocker.spy(fastapi.BackgroundTasks, "add_task")
    response = client.post(
        "/api/v1/searches",
        json={
            "query_string": "climate",
            "exact_match": True,
        },
        headers=user_token_headers,
    )
    assert response.status_code == 200
    # Check the jit query call
    assert jit_query_spy.call_count == 1 or jit_query_spy.call_count == 2
    actual_search_body = jit_query_spy.mock_calls[0].args[1]
    actual_config = jit_query_spy.mock_calls[0].args[2]
    expected_search_body = SearchRequestBody(
        query_string="climate",
        exact_match=True,
        max_passages_per_doc=10,
        keyword_filters=None,
        year_range=None,
        sort_field=None,
        sort_order=SortOrder.DESCENDING,
        jit_query=JitQuery.ENABLED,
        limit=10,
        offset=0,
    )
    assert actual_search_body == expected_search_body
    # Check the first call has overridden the default config
    overrides = {
        "max_doc_count": 20,
    }
    expected_config = dataclasses.replace(OpenSearchQueryConfig(), **overrides)
    assert actual_config == expected_config
    # Check the background query call
    assert background_task_spy.call_count == 1
    actual_bkg_search_body = background_task_spy.mock_calls[0].args[3]
    expected_bkg_search_body = SearchRequestBody(
        query_string="climate",
        exact_match=True,
        max_passages_per_doc=10,
        keyword_filters=None,
        year_range=None,
        sort_field=None,
        sort_order=SortOrder.DESCENDING,
        jit_query=JitQuery.ENABLED,
        limit=10,
        offset=0,
    )
    assert actual_bkg_search_body == expected_bkg_search_body
    # Check the background call is run with default config
    actual_bkg_config = background_task_spy.mock_calls[0].args[4]
    assert actual_bkg_config == OpenSearchQueryConfig()


@pytest.mark.search
def test_without_jit(test_opensearch, monkeypatch, client, user_token_headers, mocker):
    monkeypatch.setattr(search, "_OPENSEARCH_CONNECTION", test_opensearch)
    query_spy = mocker.spy(search._OPENSEARCH_CONNECTION, "query")
    background_task_spy = mocker.spy(fastapi.BackgroundTasks, "add_task")
    response = client.post(
        "/api/v1/searches",
        json={
            "query_string": "climate",
            "exact_match": True,
            "jit_query": "disabled",
        },
        headers=user_token_headers,
    )
    assert response.status_code == 200
    # Ensure nothing has/is going on in the background
    assert background_task_spy.call_count == 0
    assert query_spy.call_count == 1  # Called once as not using jit search
    actual_search_body = query_spy.mock_calls[0].args[0]
    expected_search_body = SearchRequestBody(
        query_string="climate",
        exact_match=True,
        max_passages_per_doc=10,
        keyword_filters=None,
        year_range=None,
        sort_field=None,
        sort_order=SortOrder.DESCENDING,
        jit_query=JitQuery.DISABLED,
        limit=10,
        offset=0,
    )
    assert actual_search_body == expected_search_body
    # Check default config is used
    actual_config = query_spy.mock_calls[0].args[1]
    expected_config = OpenSearchQueryConfig()
    assert actual_config == expected_config


@pytest.mark.search
def test_keyword_filters(
    test_opensearch, monkeypatch, client, user_token_headers, mocker
):
    monkeypatch.setattr(search, "_OPENSEARCH_CONNECTION", test_opensearch)
    query_spy = mocker.spy(search._OPENSEARCH_CONNECTION, "raw_query")
    response = client.post(
        "/api/v1/searches",
        json={
            "query_string": "climate",
            "exact_match": False,
            "keyword_filters": {"countries": ["Kenya"]},
            "jit_query": "disabled",
        },
        headers=user_token_headers,
    )
    assert response.status_code == 200
    assert query_spy.call_count == 1
    query_body = query_spy.mock_calls[0].args[0]
    assert {
        "terms": {_FILTER_FIELD_MAP[FilterField("countries")]: ["Kenya"]}
    } in query_body["query"]["bool"]["filter"]


@pytest.mark.search
def test_invalid_keyword_filters(
    test_opensearch, monkeypatch, client, user_token_headers
):
    monkeypatch.setattr(search, "_OPENSEARCH_CONNECTION", test_opensearch)
    response = client.post(
        "/api/v1/searches",
        json={
            "query_string": "disaster",
            "exact_match": False,
            "keyword_filters": {
                "geographies": ["Kenya"],
                "unknown_filter_no1": ["BOOM"],
            },
        },
        headers=user_token_headers,
    )
    assert response.status_code == 422


@pytest.mark.search
@pytest.mark.parametrize(
    "year_range", [(None, None), (1900, None), (None, 2020), (1900, 2020)]
)
def test_year_range_filters(
    test_opensearch,
    monkeypatch,
    client,
    user_token_headers,
    mocker,
    year_range,
):
    monkeypatch.setattr(search, "_OPENSEARCH_CONNECTION", test_opensearch)
    query_spy = mocker.spy(search._OPENSEARCH_CONNECTION, "raw_query")
    response = client.post(
        "/api/v1/searches",
        json={
            "query_string": "disaster",
            "exact_match": False,
            "year_range": year_range,
            "jit_query": "disabled",
        },
        headers=user_token_headers,
    )
    query_body = query_spy.mock_calls[0].args[0]
    assert response.status_code == 200
    assert query_spy.call_count == 1
    # Check that search query default order is not modified unless requested
    assert query_body["aggs"]["sample"]["aggs"]["top_docs"]["terms"]["order"] == {
        "top_hit": "desc"
    }
    if year_range[0] or year_range[1]:
        expected_range_check = {
            "range": {
                "document_date": dict(
                    [
                        r
                        for r in zip(
                            ["gte", "lte"],
                            [
                                f"01/01/{year_range[0]}"
                                if year_range[0] is not None
                                else None,
                                f"31/12/{year_range[1]}"
                                if year_range[1] is not None
                                else None,
                            ],
                        )
                        if r[1] is not None
                    ]
                )
            }
        }
        assert expected_range_check in query_body["query"]["bool"]["filter"]
    else:
        assert "filter" not in query_body["query"]["bool"]


@pytest.mark.search
def test_multiple_filters(
    test_opensearch, monkeypatch, client, user_token_headers, mocker
):
    """Check that multiple filters are successfully applied."""
    monkeypatch.setattr(search, "_OPENSEARCH_CONNECTION", test_opensearch)
    query_spy = mocker.spy(search._OPENSEARCH_CONNECTION, "raw_query")
    response = client.post(
        "/api/v1/searches",
        json={
            "query_string": "disaster",
            "exact_match": False,
            "keyword_filters": {
                "countries": ["Kenya"],
                "sources": ["CCLW"],
            },
            "year_range": (1900, 2020),
            "jit_query": "disabled",
        },
        headers=user_token_headers,
    )
    assert response.status_code == 200
    assert query_spy.call_count == 1
    query_body = query_spy.mock_calls[0].args[0]
    assert {
        "terms": {_FILTER_FIELD_MAP[FilterField("countries")]: ["Kenya"]}
    } in query_body["query"]["bool"]["filter"]
    assert {
        "terms": {_FILTER_FIELD_MAP[FilterField("sources")]: ["CCLW"]}
    } in query_body["query"]["bool"]["filter"]
    assert {
        "range": {"document_date": {"gte": "01/01/1900", "lte": "31/12/2020"}}
    } in query_body["query"]["bool"]["filter"]


@pytest.mark.search
def test_result_order_score(
    test_opensearch, monkeypatch, client, user_token_headers, mocker
):
    monkeypatch.setattr(search, "_OPENSEARCH_CONNECTION", test_opensearch)
    query_spy = mocker.spy(search._OPENSEARCH_CONNECTION, "raw_query")
    response = client.post(
        "/api/v1/searches",
        json={
            "query_string": "disaster",
            "exact_match": False,
        },
        headers=user_token_headers,
    )
    assert response.status_code == 200
    query_response = query_spy.spy_return.raw_response
    result_docs = query_response["aggregations"]["sample"]["top_docs"]["buckets"]
    s = None
    for d in result_docs:
        new_s = d["top_hit"]["value"]
        if s is not None:
            assert new_s <= s
        s = new_s


@pytest.mark.search
@pytest.mark.parametrize("order", [SortOrder.ASCENDING, SortOrder.DESCENDING])
def test_result_order_date(
    test_opensearch, monkeypatch, client, user_token_headers, order
):
    monkeypatch.setattr(search, "_OPENSEARCH_CONNECTION", test_opensearch)
    response = client.post(
        "/api/v1/searches",
        json={
            "query_string": "climate",
            "exact_match": False,
            "sort_field": "date",
            "sort_order": order.value,
        },
        headers=user_token_headers,
    )
    assert response.status_code == 200
    response_body = response.json()
    documents = response_body["documents"]
    assert len(documents) > 1
    dt = None
    for d in documents:
        new_dt = datetime.strptime(d["document_date"], "%d/%m/%Y")
        if dt is not None:
            if order == SortOrder.DESCENDING:
                assert new_dt <= dt
            if order == SortOrder.ASCENDING:
                assert new_dt >= dt
        dt = new_dt


@pytest.mark.search
@pytest.mark.parametrize("order", [SortOrder.ASCENDING, SortOrder.DESCENDING])
def test_result_order_title(
    test_opensearch, monkeypatch, client, user_token_headers, order
):
    monkeypatch.setattr(search, "_OPENSEARCH_CONNECTION", test_opensearch)
    response = client.post(
        "/api/v1/searches",
        json={
            "query_string": "climate",
            "exact_match": False,
            "sort_field": "title",
            "sort_order": order.value,
        },
        headers=user_token_headers,
    )
    assert response.status_code == 200
    response_body = response.json()
    documents = response_body["documents"]
    assert len(documents) > 1
    t = None
    for d in documents:
        new_t = d["document_name"]
        if t is not None:
            if order == SortOrder.DESCENDING:
                assert new_t <= t
            if order == SortOrder.ASCENDING:
                assert new_t >= t
        t = new_t


@pytest.mark.search
def test_invalid_request(test_opensearch, monkeypatch, client, user_token_headers):
    monkeypatch.setattr(search, "_OPENSEARCH_CONNECTION", test_opensearch)
    response = client.post(
        "/api/v1/searches",
        json={"exact_match": False},
        headers=user_token_headers,
    )
    assert response.status_code == 422
    response = client.post(
        "/api/v1/searches",
        json={"limit": 1, "offset": 2},
        headers=user_token_headers,
    )
    assert response.status_code == 422
    response = client.post(
        "/api/v1/searches",
        json={},
        headers=user_token_headers,
    )
    assert response.status_code == 422


@pytest.mark.search
def test_case_insensitivity(test_opensearch, monkeypatch, client, user_token_headers):
    """Make sure that query string results are not affected by case."""
    monkeypatch.setattr(search, "_OPENSEARCH_CONNECTION", test_opensearch)
    response1 = client.post(
        "/api/v1/searches",
        json={"query_string": "climate", "exact_match": False},
        headers=user_token_headers,
    )
    response2 = client.post(
        "/api/v1/searches",
        json={"query_string": "ClImAtE", "exact_match": False},
        headers=user_token_headers,
    )
    response3 = client.post(
        "/api/v1/searches",
        json={"query_string": "CLIMATE", "exact_match": False},
        headers=user_token_headers,
    )
    response1_json = response1.json()
    del response1_json["query_time_ms"]
    response2_json = response2.json()
    del response2_json["query_time_ms"]
    response3_json = response3.json()
    del response3_json["query_time_ms"]
    assert response1_json["documents"]
    assert response1_json == response2_json == response3_json


@pytest.mark.search
def test_punctuation_ignored(test_opensearch, monkeypatch, client, user_token_headers):
    """Make sure that punctuation in query strings is ignored."""
    monkeypatch.setattr(search, "_OPENSEARCH_CONNECTION", test_opensearch)
    response1 = client.post(
        "/api/v1/searches",
        json={"query_string": "climate.", "exact_match": False},
        headers=user_token_headers,
    )
    response2 = client.post(
        "/api/v1/searches",
        json={"query_string": "climate, ", "exact_match": False},
        headers=user_token_headers,
    )
    response3 = client.post(
        "/api/v1/searches",
        json={"query_string": ";climate", "exact_match": False},
        headers=user_token_headers,
    )
    response1_json = response1.json()
    del response1_json["query_time_ms"]
    response2_json = response2.json()
    del response2_json["query_time_ms"]
    response3_json = response3.json()
    del response3_json["query_time_ms"]
    assert response1_json["documents"]
    assert response1_json == response2_json == response3_json


@pytest.mark.search
def test_accents_ignored(test_opensearch, monkeypatch, client, user_token_headers):
    """Make sure that accents in query strings are ignored."""
    monkeypatch.setattr(search, "_OPENSEARCH_CONNECTION", test_opensearch)
    response1 = client.post(
        "/api/v1/searches",
        json={"query_string": "climàte", "exact_match": False},
        headers=user_token_headers,
    )
    response2 = client.post(
        "/api/v1/searches",
        json={"query_string": "climatë", "exact_match": False},
        headers=user_token_headers,
    )
    response3 = client.post(
        "/api/v1/searches",
        json={"query_string": "climàtë", "exact_match": False},
        headers=user_token_headers,
    )
    response1_json = response1.json()
    del response1_json["query_time_ms"]
    response2_json = response2.json()
    del response2_json["query_time_ms"]
    response3_json = response3.json()
    del response3_json["query_time_ms"]
    assert response1_json["documents"]
    assert response1_json == response2_json == response3_json


@pytest.mark.search
def test_unauthenticated(client):
    """Make sure that unauthenticated requests are denied correctly."""
    response = client.post(
        "/api/v1/searches",
        json={"query_string": "a", "exact_match": True},
    )
    assert response.status_code == 401


@pytest.mark.search
def test_time_taken(test_opensearch, monkeypatch, client, user_token_headers):
    """Make sure that query time taken is sensible."""
    monkeypatch.setattr(search, "_OPENSEARCH_CONNECTION", test_opensearch)
    start = time.time()
    response = client.post(
        "/api/v1/searches",
        json={"query_string": "disaster", "exact_match": False},
        headers=user_token_headers,
    )
    end = time.time()
    assert response.status_code == 200
    response_json = response.json()
    reported_response_time_ms = response_json["query_time_ms"]
    expected_response_time_ms_max = 1000 * (end - start)
    assert 0 < reported_response_time_ms < expected_response_time_ms_max


@pytest.mark.search
def test_empty_search_term_performs_browse(
    test_opensearch,
    monkeypatch,
    client,
    user_token_headers,
):
    """Make sure that an empty search term performs a browse and returns all documents."""
    monkeypatch.setattr(search, "_OPENSEARCH_CONNECTION", test_opensearch)
    response = client.post(
        "/api/v1/searches",
        json={"query_string": ""},
        headers=user_token_headers,
    )
    assert response.status_code == 200
    assert response.json()["hits"] == _TOTAL_DOCUMENT_COUNT


@pytest.mark.search
@pytest.mark.parametrize("order", [SortOrder.ASCENDING, SortOrder.DESCENDING])
def test_browse_order_by_title(
    test_opensearch,
    monkeypatch,
    client,
    user_token_headers,
    order,
):
    """Make sure that browse results can be ordered by title."""
    monkeypatch.setattr(search, "_OPENSEARCH_CONNECTION", test_opensearch)
    response = client.post(
        "/api/v1/searches",
        json={
            "query_string": "",
            "sort_field": "title",
            "sort_order": order.value,
        },
        headers=user_token_headers,
    )
    assert response.status_code == 200
    response_body = response.json()
    documents = response_body["documents"]
    assert len(documents) == _TOTAL_DOCUMENT_COUNT
    t = None
    for d in documents:
        new_t = d["document_name"]
        if t is not None:
            if order == SortOrder.DESCENDING:
                assert new_t <= t
            if order == SortOrder.ASCENDING:
                assert new_t >= t
        t = new_t


@pytest.mark.search
@pytest.mark.parametrize("order", [SortOrder.ASCENDING, SortOrder.DESCENDING])
def test_browse_order_by_date(
    test_opensearch,
    monkeypatch,
    client,
    user_token_headers,
    order,
):
    """Make sure that browse results can be ordered by date."""
    monkeypatch.setattr(search, "_OPENSEARCH_CONNECTION", test_opensearch)
    response = client.post(
        "/api/v1/searches",
        json={
            "query_string": "",
            "sort_field": "date",
            "sort_order": order.value,
        },
        headers=user_token_headers,
    )
    assert response.status_code == 200
    response_body = response.json()
    documents = response_body["documents"]
    assert len(documents) == _TOTAL_DOCUMENT_COUNT
    dt = None
    for d in documents:
        new_dt = datetime.strptime(d["document_date"], "%d/%m/%Y")
        if dt is not None:
            if order == SortOrder.DESCENDING:
                assert new_dt <= dt
            if order == SortOrder.ASCENDING:
                assert new_dt >= dt
        dt = new_dt


@pytest.mark.search
@pytest.mark.parametrize("limit", [1, 4, 7, 10])
@pytest.mark.parametrize("offset", [0, 1, 7, 10])
def test_browse_limit_offset(
    test_opensearch,
    monkeypatch,
    client,
    user_token_headers,
    limit,
    offset,
):
    """Make sure that browse results respect limit and offset."""
    monkeypatch.setattr(search, "_OPENSEARCH_CONNECTION", test_opensearch)
    response = client.post(
        "/api/v1/searches",
        json={"query_string": "", "limit": limit, "offset": offset},
        headers=user_token_headers,
    )
    assert response.status_code == 200
    response_body = response.json()
    documents = response_body["documents"]
    assert len(documents) == min(limit, max(0, _TOTAL_DOCUMENT_COUNT - offset))


@pytest.mark.search
def test_browse_filters(
    test_opensearch, monkeypatch, client, user_token_headers, mocker
):
    """Check that multiple filters are successfully applied."""
    monkeypatch.setattr(search, "_OPENSEARCH_CONNECTION", test_opensearch)
    query_spy = mocker.spy(search._OPENSEARCH_CONNECTION, "raw_query")
    response = client.post(
        "/api/v1/searches",
        json={
            "query_string": "",
            "keyword_filters": {
                "countries": ["Kenya"],
                "sources": ["CCLW"],
            },
            "year_range": (1900, 2020),
            "jit_query": "disabled",
        },
        headers=user_token_headers,
    )
    assert response.status_code == 200
    assert query_spy.call_count == 1
    query_body = query_spy.mock_calls[0].args[0]
    assert {
        "terms": {_FILTER_FIELD_MAP[FilterField("countries")]: ["Kenya"]}
    } in query_body["query"]["bool"]["filter"]
    assert {
        "terms": {_FILTER_FIELD_MAP[FilterField("sources")]: ["CCLW"]}
    } in query_body["query"]["bool"]["filter"]
    assert {
        "range": {"document_date": {"gte": "01/01/1900", "lte": "31/12/2020"}}
    } in query_body["query"]["bool"]["filter"]
    response_body = response.json()
    documents = response_body["documents"]
    ...
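The tests above rely on shared pytest fixtures (client, user_token_headers, test_opensearch) and on spies created via pytest-mock's mocker fixture; none of those fixtures appear on this page. The conftest.py below is a minimal, assumed sketch of what the client and auth-header fixtures might look like: the app.main import path and the placeholder token are assumptions, and the real test_opensearch fixture would additionally point the connection at a disposable OpenSearch index seeded with the seven documents counted by _TOTAL_DOCUMENT_COUNT.

# conftest.py -- illustrative sketch only, not the project's real fixtures.
# Fixture names match those used by the tests above; the bodies are assumptions.
import pytest
from fastapi.testclient import TestClient


@pytest.fixture
def client():
    # Assumption: the FastAPI application object is importable from app.main;
    # the real module path may differ.
    from app.main import app

    return TestClient(app)


@pytest.fixture
def user_token_headers():
    # Assumption: the API uses bearer-token auth. A real fixture would mint a
    # valid JWT for a test user rather than use this hypothetical placeholder.
    return {"Authorization": "Bearer <test-user-token>"}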
test_routes.py
Source: test_routes.py
...
    assert rv._status_code == 200
    config = json.loads(rv.data)
    for key in demo_config.keys():
        assert config[key] == demo_config[key]

def test_opensearch(client):
    rv = client.get('/opensearch.xml')
    assert rv._status_code == 200
...
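This snippet requests /opensearch.xml through what looks like a Flask-style test client (rv.data, rv._status_code) and only checks for a 200 response. For context, a minimal route of the kind being tested is sketched below; the Flask wiring, ShortName, and template URL are illustrative assumptions, not the application's actual implementation.

# Illustrative sketch of an /opensearch.xml route like the one tested above.
from flask import Flask, Response

app = Flask(__name__)

OPENSEARCH_XML = """<?xml version="1.0" encoding="UTF-8"?>
<OpenSearchDescription xmlns="http://a9.com/-/spec/opensearch/1.1/">
  <ShortName>Example</ShortName>
  <Description>Example OpenSearch description document.</Description>
  <Url type="text/html" template="https://example.com/search?q={searchTerms}"/>
</OpenSearchDescription>
"""


@app.route("/opensearch.xml")
def opensearch():
    # Served with the OpenSearch description MIME type so browsers can
    # register the site as a search provider.
    return Response(OPENSEARCH_XML, mimetype="application/opensearchdescription+xml")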