diff --git a/.circleci/config.yml b/.circleci/config.yml index 313f5b5fdd..3756d27f9b 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -105,7 +105,7 @@ jobs: - run: name: Run tests # Use built-in Django test module - command: coverage run --source='.' --rcfile=.coveragerc manage.py test --keepdb + command: coverage run --source='.' --rcfile=.coveragerc manage.py test working_directory: ~/project/django-backend/ - run: @@ -156,7 +156,7 @@ jobs: command: | flake8 --config django-backend/.flake8 - deploy: + deploy-job: docker: - image: cimg/python:3.10 @@ -174,7 +174,7 @@ jobs: export PATH=$HOME/bin:$PATH curl -L "https://cli.run.pivotal.io/stable?release=linux64-binary&version=7.1.0" | tar xzv -C $HOME/bin - - deploy: + - run: name: Deploy API command: | export PATH=$HOME/bin:$PATH @@ -244,7 +244,7 @@ workflows: jobs: - test - dependency-check - - deploy: + - deploy-job: requires: - test - dependency-check diff --git a/.gitignore b/.gitignore index a30200da4a..b733090575 100644 --- a/.gitignore +++ b/.gitignore @@ -62,6 +62,8 @@ django-backend/.local/ *.bower-registry *.bower-tmp +# Locust Testing Data +locust-testing/locust-data/*.locust.json # System Files @@ -74,4 +76,4 @@ front-end/.vscode/launch.json .cfmeta # Virtual environment -.venv/ \ No newline at end of file +.venv/ diff --git a/.safety.dependency.ignore b/.safety.dependency.ignore index d0a6b203c7..d47fa7a176 100644 --- a/.safety.dependency.ignore +++ b/.safety.dependency.ignore @@ -8,6 +8,5 @@ # Example: # 40104 2022-01-15 # -63687 2024-05-01 # gitpython <3.1.41 -64227 2024-05-01 # jinja2 <3.1.3 -64976 2024-05-01 # django < 4.2.10 +65771 2024-06-29 # https://github.com/fecgov/fecfile-web-api/issues/803 + diff --git a/Dockerfile b/Dockerfile index 38e0250995..5f432fb891 100644 --- a/Dockerfile +++ b/Dockerfile @@ -12,4 +12,4 @@ RUN useradd nxgu --no-create-home --home /opt/nxg_fec && chown -R nxgu:nxgu /opt USER nxgu EXPOSE 8080 -ENTRYPOINT ["/bin/sh", "-c", "python wait_for_db.py && python manage.py migrate && python manage.py loaddata fixtures/e2e-test-data.json && python manage.py load_committee_data && python manage.py create_committee_views && gunicorn --bind 0.0.0.0:8080 fecfiler.wsgi -w 9 --reload"] +ENTRYPOINT ["/bin/sh", "-c", "python wait_for_db.py && python manage.py migrate && python manage.py loaddata fixtures/e2e-test-data.json && python manage.py load_committee_data && python manage.py create_committee_views && gunicorn --bind 0.0.0.0:8080 fecfiler.wsgi -w 9 --reload"] \ No newline at end of file diff --git a/Dockerfile-e2e b/Dockerfile-e2e index ba480db1cb..589ff997da 100644 --- a/Dockerfile-e2e +++ b/Dockerfile-e2e @@ -13,4 +13,4 @@ RUN useradd nxgu --no-create-home --home /opt/nxg_fec_e2e && chown -R nxgu:nxgu USER nxgu EXPOSE 8080 -ENTRYPOINT ["/bin/sh", "-c", "python wait_for_db.py && python manage.py migrate && python manage.py loaddata fixtures/e2e-test-data.json && python manage.py load_committee_data && python manage.py create_committee_views && gunicorn --bind 0.0.0.0:8080 fecfiler.wsgi -w 9 --reload"] +ENTRYPOINT ["/bin/sh", "-c", "python wait_for_db.py && python manage.py migrate && python manage.py loaddata fixtures/e2e-test-data.json && python manage.py load_committee_data && python manage.py create_committee_views && gunicorn --bind 0.0.0.0:8080 fecfiler.wsgi -w 9 --reload"] \ No newline at end of file diff --git a/django-backend/fecfiler/mock_openfec/mock_endpoints.py b/django-backend/fecfiler/mock_openfec/mock_endpoints.py index a91a8887e8..11aeb958ed 100644 --- 
a/django-backend/fecfiler/mock_openfec/mock_endpoints.py +++ b/django-backend/fecfiler/mock_openfec/mock_endpoints.py @@ -7,6 +7,35 @@ redis_instance = redis.Redis.from_url(MOCK_OPENFEC_REDIS_URL) +def committee(committee_id): + if redis_instance: + committee_data = redis_instance.get(COMMITTEE_DATA_REDIS_KEY) or "" + committees = json.loads(committee_data) or [] + committee = next( + ( + committee + for committee in committees + if committee.get("committee_id") == committee_id + ), + None, + ) + if committee: + # rename key so we can use same mock data for both + # query_filings and committee details endpoints + committee['name'] = committee.pop('committee_name') + return { # same as api.open.fec.gov + "api_version": "1.0", + "results": [committee], + "pagination": { + "pages": 1, + "per_page": 20, + "count": 1, + "page": 1, + }, + } + return None + + def query_filings(query, form_type): if redis_instance: committee_data = redis_instance.get(COMMITTEE_DATA_REDIS_KEY) or "" diff --git a/django-backend/fecfiler/openfec/test_efo_mock_data/committee_accounts.json b/django-backend/fecfiler/openfec/test_efo_mock_data/committee_accounts.json deleted file mode 100644 index 9145bcc7f4..0000000000 --- a/django-backend/fecfiler/openfec/test_efo_mock_data/committee_accounts.json +++ /dev/null @@ -1,603 +0,0 @@ -[ - { - "committee_id": "C00100214", - "name": "TEST_COMMITTEE_NAME3", - "committee_type_full": "TEST_COMMITTEE_TYPE3", - "street_1": "TEST_SA3", - "city": "TEST_CITY3", - "state": "AL", - "zip": "12345", - "email": "fakeemail@test.com", - "treasurer_name_1": "test_fn3", - "treasurer_name_2": "test_ln3", - "treasurer_name_middle": "test_mn3", - "treasurer_name_prefix": "test_p3", - "treasurer_name_suffix": "test_s3", - "treasurer_street_1": "test_a3", - "treasurer_city": "test_c3", - "treasurer_state": "AL", - "treasurer_zip": "12345", - "treasurer_name_title": "test_title3", - "treasurer_phone": "8175212197", - "custodian_name_full": "test_ln3, test_fn3", - "custodian_name_1": "test_fn3", - "custodian_name_2": "test_ln3", - "custodian_street_1": "test_a3", - "custodian_city": "test_c3", - "custodian_state": "AL", - "custodian_zip": "12345", - "custodian_name_title": "test_title3", - "custodian_phone": "8175212197" - }, - { - "committee_id": "C00100222", - "name": "TEST_COMMITTEE_NAME4", - "committee_type_full": "TEST_COMMITTEE_TYPE4", - "street_1": "TEST_SA4", - "city": "TEST_CITY4", - "state": "AL", - "zip": "12345", - "email": "fakeemail@test.com", - "treasurer_name_1": "test_fn4", - "treasurer_name_2": "test_ln4", - "treasurer_name_middle": "test_mn4", - "treasurer_name_prefix": "test_p4", - "treasurer_name_suffix": "test_s4", - "treasurer_street_1": "test_a4", - "treasurer_city": "test_c4", - "treasurer_state": "AL", - "treasurer_zip": "12345", - "treasurer_name_title": "test_title4", - "treasurer_phone": "8175212197", - "custodian_name_full": "test_ln4, test_fn4", - "custodian_name_1": "test_fn4", - "custodian_name_2": "test_ln4", - "custodian_street_1": "test_a4", - "custodian_city": "test_c4", - "custodian_state": "AL", - "custodian_zip": "12345", - "custodian_name_title": "test_title4", - "custodian_phone": "8175212197" - }, - { - "committee_id": "C00100230", - "name": "TEST_COMMITTEE_NAME5", - "committee_type_full": "TEST_COMMITTEE_TYPE5", - "street_1": "TEST_SA5", - "city": "TEST_CITY5", - "state": "AL", - "zip": "12345", - "email": "fakeemail@test.com", - "treasurer_name_1": "test_fn5", - "treasurer_name_2": "test_ln5", - "treasurer_name_middle": "test_mn5", - 
"treasurer_name_prefix": "test_p5", - "treasurer_name_suffix": "test_s5", - "treasurer_street_1": "test_a5", - "treasurer_city": "test_c5", - "treasurer_state": "AL", - "treasurer_zip": "12345", - "treasurer_name_title": "test_title5", - "treasurer_phone": "8175212197", - "custodian_name_full": "test_ln5, test_fn5", - "custodian_name_1": "test_fn5", - "custodian_name_2": "test_ln5", - "custodian_street_1": "test_a5", - "custodian_city": "test_c5", - "custodian_state": "AL", - "custodian_zip": "12345", - "custodian_name_title": "test_title5", - "custodian_phone": "8175212197", - "committee_type": "Q" - }, - { - "committee_id": "C00100248", - "name": "TEST_COMMITTEE_NAME6", - "committee_type_full": "TEST_COMMITTEE_TYPE6", - "street_1": "TEST_SA6", - "city": "TEST_CITY6", - "state": "AL", - "zip": "12345", - "email": "fakeemail@test.com", - "treasurer_name_1": "test_fn6", - "treasurer_name_2": "test_ln6", - "treasurer_name_middle": "test_mn6", - "treasurer_name_prefix": "test_p6", - "treasurer_name_suffix": "test_s6", - "treasurer_street_1": "test_a6", - "treasurer_city": "test_c6", - "treasurer_state": "AL", - "treasurer_zip": "12345", - "treasurer_name_title": "test_title6", - "treasurer_phone": "8175212197", - "custodian_name_full": "test_ln6, test_fn6", - "custodian_name_1": "test_fn6", - "custodian_name_2": "test_ln6", - "custodian_street_1": "test_a6", - "custodian_city": "test_c6", - "custodian_state": "AL", - "custodian_zip": "12345", - "custodian_name_title": "test_title6", - "custodian_phone": "8175212197" - }, - { - "committee_id": "C00100255", - "name": "TEST_COMMITTEE_NAME7", - "committee_type_full": "TEST_COMMITTEE_TYPE7", - "street_1": "TEST_SA7", - "city": "TEST_CITY7", - "state": "AL", - "zip": "12345", - "email": "fakeemail@test.com", - "treasurer_name_1": "test_fn7", - "treasurer_name_2": "test_ln7", - "treasurer_name_middle": "test_mn7", - "treasurer_name_prefix": "test_p7", - "treasurer_name_suffix": "test_s7", - "treasurer_street_1": "test_a7", - "treasurer_city": "test_c7", - "treasurer_state": "AL", - "treasurer_zip": "12345", - "treasurer_name_title": "test_title7", - "treasurer_phone": "8175212197", - "custodian_name_full": "test_ln7, test_fn7", - "custodian_name_1": "test_fn7", - "custodian_name_2": "test_ln7", - "custodian_street_1": "test_a7", - "custodian_city": "test_c7", - "custodian_state": "AL", - "custodian_zip": "12345", - "custodian_name_title": "test_title7", - "custodian_phone": "8175212197" - }, - { - "committee_id": "C00100263", - "name": "TEST_COMMITTEE_NAME8", - "committee_type_full": "TEST_COMMITTEE_TYPE8", - "street_1": "TEST_SA8", - "city": "TEST_CITY8", - "state": "AL", - "zip": "12345", - "email": "fakeemail@test.com", - "treasurer_name_1": "test_fn8", - "treasurer_name_2": "test_ln8", - "treasurer_name_middle": "test_mn8", - "treasurer_name_prefix": "test_p8", - "treasurer_name_suffix": "test_s8", - "treasurer_street_1": "test_a8", - "treasurer_city": "test_c8", - "treasurer_state": "AL", - "treasurer_zip": "12345", - "treasurer_name_title": "test_title8", - "treasurer_phone": "8175212197", - "custodian_name_full": "test_ln8, test_fn8", - "custodian_name_1": "test_fn8", - "custodian_name_2": "test_ln8", - "custodian_street_1": "test_a8", - "custodian_city": "test_c8", - "custodian_state": "AL", - "custodian_zip": "12345", - "custodian_name_title": "test_title8", - "custodian_phone": "8175212197", - "committee_type": "Q" - }, - { - "committee_id": "C00100271", - "name": "TEST_COMMITTEE_NAME9", - "committee_type_full": "TEST_COMMITTEE_TYPE9", - 
"street_1": "TEST_SA9", - "city": "TEST_CITY9", - "state": "AL", - "zip": "12345", - "email": "fakeemail@test.com", - "treasurer_name_1": "test_fn9", - "treasurer_name_2": "test_ln9", - "treasurer_name_middle": "test_mn9", - "treasurer_name_prefix": "test_p9", - "treasurer_name_suffix": "test_s9", - "treasurer_street_1": "test_a9", - "treasurer_city": "test_c9", - "treasurer_state": "AL", - "treasurer_zip": "12345", - "treasurer_name_title": "test_title9", - "treasurer_phone": "8175212197", - "custodian_name_full": "test_ln9, test_fn9", - "custodian_name_1": "test_fn9", - "custodian_name_2": "test_ln9", - "custodian_street_1": "test_a9", - "custodian_city": "test_c9", - "custodian_state": "AL", - "custodian_zip": "12345", - "custodian_name_title": "test_title9", - "custodian_phone": "8175212197" - }, - { - "committee_id": "C00100289", - "name": "TEST_COMMITTEE_NAME10", - "committee_type_full": "TEST_COMMITTEE_TYPE10", - "street_1": "TEST_SA10", - "city": "TEST_CITY10", - "state": "AL", - "zip": "12345", - "email": "fakeemail@test.com", - "treasurer_name_1": "test_fn10", - "treasurer_name_2": "test_ln10", - "treasurer_name_middle": "test_mn10", - "treasurer_name_prefix": "test_p10", - "treasurer_name_suffix": "test_s10", - "treasurer_street_1": "test_a10", - "treasurer_city": "test_c10", - "treasurer_state": "AL", - "treasurer_zip": "12345", - "treasurer_name_title": "test_title10", - "treasurer_phone": "8175212197", - "custodian_name_full": "test_ln10, test_fn10", - "custodian_name_1": "test_fn10", - "custodian_name_2": "test_ln10", - "custodian_street_1": "test_a10", - "custodian_city": "test_c10", - "custodian_state": "AL", - "custodian_zip": "12345", - "custodian_name_title": "test_title10", - "custodian_phone": "8175212197", - "committee_type": "H" - }, - { - "committee_id": "C00100297", - "name": "TEST_COMMITTEE_NAME11", - "committee_type_full": "TEST_COMMITTEE_TYPE11", - "street_1": "TEST_SA11", - "city": "TEST_CITY11", - "state": "AL", - "zip": "12345", - "email": "fakeemail@test.com", - "treasurer_name_1": "test_fn11", - "treasurer_name_2": "test_ln11", - "treasurer_name_middle": "test_mn11", - "treasurer_name_prefix": "test_p11", - "treasurer_name_suffix": "test_s11", - "treasurer_street_1": "test_a11", - "treasurer_city": "test_c11", - "treasurer_state": "AL", - "treasurer_zip": "12345", - "treasurer_name_title": "test_title11", - "treasurer_phone": "8175212197", - "custodian_name_full": "test_ln11, test_fn11", - "custodian_name_1": "test_fn11", - "custodian_name_2": "test_ln11", - "custodian_street_1": "test_a11", - "custodian_city": "test_c11", - "custodian_state": "AL", - "custodian_zip": "12345", - "custodian_name_title": "test_title11", - "custodian_phone": "8175212197" - }, - { - "committee_id": "C00100461", - "name": "TEST_COMMITTEE_NAME13", - "committee_type_full": "TEST_COMMITTEE_TYPE13", - "street_1": "TEST_SA13", - "city": "TEST_CITY13", - "state": "AL", - "zip": "12345", - "email": "fakeemail@test.com", - "treasurer_name_1": "test_fn13", - "treasurer_name_2": "test_ln13", - "treasurer_name_middle": "test_mn13", - "treasurer_name_prefix": "test_p13", - "treasurer_name_suffix": "test_s13", - "treasurer_street_1": "test_a13", - "treasurer_city": "test_c13", - "treasurer_state": "AL", - "treasurer_zip": "12345", - "treasurer_name_title": "test_title13", - "treasurer_phone": "8175212197", - "custodian_name_full": "test_ln13, test_fn13", - "custodian_name_1": "test_fn13", - "custodian_name_2": "test_ln13", - "custodian_street_1": "test_a13", - "custodian_city": "test_c13", 
- "custodian_state": "AL", - "custodian_zip": "12345", - "custodian_name_title": "test_title13", - "custodian_phone": "8175212197" - }, - { - "committee_id": "C00100479", - "name": "TEST_COMMITTEE_NAME14", - "committee_type_full": "TEST_COMMITTEE_TYPE14", - "street_1": "TEST_SA14", - "city": "TEST_CITY14", - "state": "AL", - "zip": "12345", - "email": "fakeemail@test.com", - "treasurer_name_1": "test_fn14", - "treasurer_name_2": "test_ln14", - "treasurer_name_middle": "test_mn14", - "treasurer_name_prefix": "test_p14", - "treasurer_name_suffix": "test_s14", - "treasurer_street_1": "test_a14", - "treasurer_city": "test_c14", - "treasurer_state": "AL", - "treasurer_zip": "12345", - "treasurer_name_title": "test_title14", - "treasurer_phone": "8175212197", - "custodian_name_full": "test_ln14, test_fn14", - "custodian_name_1": "test_fn14", - "custodian_name_2": "test_ln14", - "custodian_street_1": "test_a14", - "custodian_city": "test_c14", - "custodian_state": "AL", - "custodian_zip": "12345", - "custodian_name_title": "test_title14", - "custodian_phone": "8175212197" - }, - { - "committee_id": "C00100487", - "name": "TEST_COMMITTEE_NAME15", - "committee_type_full": "TEST_COMMITTEE_TYPE15", - "street_1": "TEST_SA15", - "city": "TEST_CITY15", - "state": "AL", - "zip": "12345", - "email": "fakeemail@test.com", - "treasurer_name_1": "test_fn15", - "treasurer_name_2": "test_ln15", - "treasurer_name_middle": "test_mn15", - "treasurer_name_prefix": "test_p15", - "treasurer_name_suffix": "test_s15", - "treasurer_street_1": "test_a15", - "treasurer_city": "test_c15", - "treasurer_state": "AL", - "treasurer_zip": "12345", - "treasurer_name_title": "test_title15", - "treasurer_phone": "8175212197", - "custodian_name_full": "test_ln15, test_fn15", - "custodian_name_1": "test_fn15", - "custodian_name_2": "test_ln15", - "custodian_street_1": "test_a15", - "custodian_city": "test_c15", - "custodian_state": "AL", - "custodian_zip": "12345", - "custodian_name_title": "test_title15", - "custodian_phone": "8175212197", - "committee_type": "Q" - }, - { - "committee_id": "C00100495", - "name": "TEST_COMMITTEE_NAME16", - "committee_type_full": "TEST_COMMITTEE_TYPE16", - "street_1": "TEST_SA16", - "city": "TEST_CITY16", - "state": "AL", - "zip": "12345", - "email": "fakeemail@test.com", - "treasurer_name_1": "test_fn16", - "treasurer_name_2": "test_ln16", - "treasurer_name_middle": "test_mn16", - "treasurer_name_prefix": "test_p16", - "treasurer_name_suffix": "test_s16", - "treasurer_street_1": "test_a16", - "treasurer_city": "test_c16", - "treasurer_state": "AL", - "treasurer_zip": "12345", - "treasurer_name_title": "test_title16", - "treasurer_phone": "8175212197", - "custodian_name_full": "test_ln16, test_fn16", - "custodian_name_1": "test_fn16", - "custodian_name_2": "test_ln16", - "custodian_street_1": "test_a16", - "custodian_city": "test_c16", - "custodian_state": "AL", - "custodian_zip": "12345", - "custodian_name_title": "test_title16", - "custodian_phone": "8175212197" - }, - { - "committee_id": "C00100503", - "name": "TEST_COMMITTEE_NAME17", - "committee_type_full": "TEST_COMMITTEE_TYPE17", - "street_1": "TEST_SA17", - "city": "TEST_CITY17", - "state": "AL", - "zip": "12345", - "email": "fakeemail@test.com", - "treasurer_name_1": "test_fn17", - "treasurer_name_2": "test_ln17", - "treasurer_name_middle": "test_mn17", - "treasurer_name_prefix": "test_p17", - "treasurer_name_suffix": "test_s17", - "treasurer_street_1": "test_a17", - "treasurer_city": "test_c17", - "treasurer_state": "AL", - 
"treasurer_zip": "12345", - "treasurer_name_title": "test_title17", - "treasurer_phone": "8175212197", - "custodian_name_full": "test_ln17, test_fn17", - "custodian_name_1": "test_fn17", - "custodian_name_2": "test_ln17", - "custodian_street_1": "test_a17", - "custodian_city": "test_c17", - "custodian_state": "AL", - "custodian_zip": "12345", - "custodian_name_title": "test_title17", - "custodian_phone": "8175212197" - }, - { - "committee_id": "C00100511", - "name": "TEST_COMMITTEE_NAME18", - "committee_type_full": "TEST_COMMITTEE_TYPE18", - "street_1": "TEST_SA18", - "city": "TEST_CITY18", - "state": "AL", - "zip": "12345", - "email": "fakeemail@test.com", - "treasurer_name_1": "test_fn18", - "treasurer_name_2": "test_ln18", - "treasurer_name_middle": "test_mn18", - "treasurer_name_prefix": "test_p18", - "treasurer_name_suffix": "test_s18", - "treasurer_street_1": "test_a18", - "treasurer_city": "test_c18", - "treasurer_state": "AL", - "treasurer_zip": "12345", - "treasurer_name_title": "test_title18", - "treasurer_phone": "8175212197", - "custodian_name_full": "test_ln18, test_fn18", - "custodian_name_1": "test_fn18", - "custodian_name_2": "test_ln18", - "custodian_street_1": "test_a18", - "custodian_city": "test_c18", - "custodian_state": "AL", - "custodian_zip": "12345", - "custodian_name_title": "test_title18", - "custodian_phone": "8175212197", - "committee_type": "Q" - }, - { - "committee_id": "C00100529", - "name": "TEST_COMMITTEE_NAME19", - "committee_type_full": "TEST_COMMITTEE_TYPE19", - "street_1": "TEST_SA19", - "city": "TEST_CITY19", - "state": "AL", - "zip": "12345", - "email": "fakeemail@test.com", - "treasurer_name_1": "test_fn19", - "treasurer_name_2": "test_ln19", - "treasurer_name_middle": "test_mn19", - "treasurer_name_prefix": "test_p19", - "treasurer_name_suffix": "test_s19", - "treasurer_street_1": "test_a19", - "treasurer_city": "test_c19", - "treasurer_state": "AL", - "treasurer_zip": "12345", - "treasurer_name_title": "test_title19", - "treasurer_phone": "8175212197", - "custodian_name_full": "test_ln19, test_fn19", - "custodian_name_1": "test_fn19", - "custodian_name_2": "test_ln19", - "custodian_street_1": "test_a19", - "custodian_city": "test_c19", - "custodian_state": "AL", - "custodian_zip": "12345", - "custodian_name_title": "test_title19", - "custodian_phone": "8175212197", - "committee_type": "W" - }, - { - "committee_id": "C00100537", - "name": "TEST_COMMITTEE_NAME20", - "committee_type_full": "TEST_COMMITTEE_TYPE20", - "street_1": "TEST_SA20", - "city": "TEST_CITY20", - "state": "AL", - "zip": "12345", - "email": "fakeemail@test.com", - "treasurer_name_1": "test_fn20", - "treasurer_name_2": "test_ln20", - "treasurer_name_middle": "test_mn20", - "treasurer_name_prefix": "test_p20", - "treasurer_name_suffix": "test_s20", - "treasurer_street_1": "test_a20", - "treasurer_city": "test_c20", - "treasurer_state": "AL", - "treasurer_zip": "12345", - "treasurer_name_title": "test_title20", - "treasurer_phone": "8175212197", - "custodian_name_full": "test_ln20, test_fn20", - "custodian_name_1": "test_fn20", - "custodian_name_2": "test_ln20", - "custodian_street_1": "test_a20", - "custodian_city": "test_c20", - "custodian_state": "AL", - "custodian_zip": "12345", - "custodian_name_title": "test_title20", - "custodian_phone": "8175212197", - "committee_type": "Y" - }, - { - "committee_id": "C00100362", - "name": "Committee to elect Test Account1", - "committee_type_full": "PAC - Nonqualified", - "street_1": "123 Main Street", - "city": "Nashville", - "state": "TN", 
- "zip": "37206", - "email": "fakeemail@testaccount.com", - "treasurer_name_1": "Sarah", - "treasurer_name_2": "Jarrett", - "treasurer_name_middle": "test_mn", - "treasurer_name_prefix": "test_p", - "treasurer_name_suffix": "test_s", - "treasurer_street_1": "1313 Jarrett Drive", - "treasurer_city": "Springfield", - "treasurer_state": "TN", - "treasurer_zip": "37172", - "treasurer_name_title": "CFO", - "treasurer_phone": "8175212197", - "custodian_name_full": "Mitchell, Scott", - "custodian_name_1": "Scott", - "custodian_name_2": "Mitchell", - "custodian_street_1": "321 Charlotte Avenue", - "custodian_city": "Nashville", - "custodian_state": "TN", - "custodian_zip": "37208", - "custodian_name_title": "President", - "custodian_phone": "8175212197" - }, - { - "committee_id": "C00100933", - "name": "Sarah for Congress", - "committee_type_full": "House", - "street_1": "1050 First Street, NE ", - "city": "Washington", - "state": "District of Columbia", - "zip": "20002", - "email": "fakeemail@testaccount.com", - "treasurer_name_1": "Ryan", - "treasurer_name_2": "Lanz", - "treasurer_street_1": "1 Apple Park Way", - "treasurer_city": "Cupertino", - "treasurer_state": "CA", - "treasurer_zip": "95014", - "treasurer_phone": "8175212197", - "custodian_name_full": "Clark, Paul", - "custodian_name_1": "Paul", - "custodian_name_2": "Clark", - "custodian_street_1": "1 Apple Park Way", - "custodian_city": "Cupertino", - "custodian_state": "CA", - "custodian_zip": "95014", - "custodian_name_title": "Custodian of record", - "filing_frequency": "A", - "committee_type": "H" - }, - { - "committee_id": "C00100941", - "name": "Michael for Congress", - "committee_type_full": "House", - "street_1": "1050 First Street, NE ", - "city": "Washington", - "state": "District of Columbia", - "zip": "20002", - "email": "fakeemail@testaccount.com", - "treasurer_name_1": "Ryan", - "treasurer_name_2": "Lanz", - "treasurer_street_1": "1 Apple Park Way", - "treasurer_city": "Cupertino", - "treasurer_state": "CA", - "treasurer_zip": "95014", - "treasurer_phone": "8175212197", - "custodian_name_full": "Colucci, Marlene", - "custodian_name_1": "Marlene", - "custodian_name_2": "Colucci", - "custodian_street_1": "1 Apple Park Way", - "custodian_city": "Cupertino", - "custodian_state": "CA", - "custodian_zip": "95014", - "custodian_name_title": "Custodian of record", - "filing_frequency": "A", - "committee_type": "H" - } -] \ No newline at end of file diff --git a/django-backend/fecfiler/openfec/test_views.py b/django-backend/fecfiler/openfec/test_views.py index 835084f386..5066a0a4bd 100644 --- a/django-backend/fecfiler/openfec/test_views.py +++ b/django-backend/fecfiler/openfec/test_views.py @@ -25,33 +25,28 @@ def test_get_committee_no_override(self): def test_get_committee_override_data_not_found(self): with patch("fecfiler.openfec.views.settings") as settings: - settings.FEC_API_COMMITTEE_LOOKUP_IDS_OVERRIDE = "C12345678" settings.BASE_DIR = "fecfiler/" - request = self.factory.get("/api/v1/openfec/C12345678/committee/") + request = self.factory.get("/api/v1/openfec/C87654321/committee/") request.user = self.user response = OpenfecViewSet.as_view({"get": "committee"})( - request, pk="C12345678" + request, pk="C87654321" ) self.assertEqual(response.status_code, 500) def test_get_committee_override_happy_path(self): with patch("fecfiler.openfec.views.settings") as settings: - settings.FEC_API_COMMITTEE_LOOKUP_IDS_OVERRIDE = "C00100230" settings.BASE_DIR = "fecfiler/" - request = 
self.factory.get("/api/v1/openfec/C00100230/committee/") + request = self.factory.get("/api/v1/openfec/C12345678/committee/") request.user = self.user response = OpenfecViewSet.as_view({"get": "committee"})( - request, pk="C00100230" + request, pk="C12345678" ) self.assertEqual(response.status_code, 200) - self.assertEqual(response.data["results"][0]["committee_id"], "C00100230") - self.assertEqual( - response.data["results"][0]["name"], "TEST_COMMITTEE_NAME5" - ) + self.assertEqual(response.data["results"][0]["committee_id"], "C12345678") self.assertEqual( - response.data["results"][0]["committee_type_full"], - "TEST_COMMITTEE_TYPE5", + response.data["results"][0]["name"], "Test Committee" ) + self.assertEqual(response.data["results"][0]["committee_type"], "O") def test_get_filings_invalid_resp(self): request = self.factory.get("/api/v1/openfec/C00100230/f1_filing/") diff --git a/django-backend/fecfiler/openfec/views.py b/django-backend/fecfiler/openfec/views.py index 4a62fe8da9..a6e933a93e 100644 --- a/django-backend/fecfiler/openfec/views.py +++ b/django-backend/fecfiler/openfec/views.py @@ -1,13 +1,11 @@ from rest_framework import viewsets -from django.http.response import HttpResponse, HttpResponseServerError +from django.http.response import HttpResponse from rest_framework.response import Response from rest_framework.decorators import action -from fecfiler.mock_openfec.mock_endpoints import query_filings +from fecfiler.mock_openfec.mock_endpoints import query_filings, committee import requests import fecfiler.settings as settings -import os -import json import structlog logger = structlog.get_logger(__name__) @@ -16,46 +14,13 @@ class OpenfecViewSet(viewsets.GenericViewSet): @action(detail=True) def committee(self, request, pk=None): - cids_to_override = ( - list( - map( - str.strip, settings.FEC_API_COMMITTEE_LOOKUP_IDS_OVERRIDE.split(",") - ) - ) - if settings.FEC_API_COMMITTEE_LOOKUP_IDS_OVERRIDE - else [] + response = committee(pk) + if response: + return Response(response) + response = requests.get( + f"{settings.FEC_API}committee/{pk}/?api_key={settings.FEC_API_KEY}" ) - cid_to_override = next((cid for cid in cids_to_override if cid == pk), None) - if cid_to_override: - mock_committee_account = get_test_efo_mock_committee_account( - cid_to_override - ) - if mock_committee_account: - return Response( - { # same as api.open.fec.gov - "api_version": "1.0", - "results": [ - mock_committee_account, - ], - "pagination": { - "pages": 1, - "per_page": 20, - "count": 1, - "page": 1, - }, - } - ) - else: - logger.error( - "Failed to find mock committee account data for " - "committee id to override: " + cid_to_override - ) - return HttpResponseServerError() - else: - resp = requests.get( - f"{settings.FEC_API}committee/{pk}/?api_key={settings.FEC_API_KEY}" - ) - return HttpResponse(resp) + return HttpResponse(response) @action(detail=True) def f1_filing(self, request, pk=None): @@ -99,24 +64,3 @@ def retrieve_recent_f1(committee_id): results = response["results"] if len(results) > 0: return results[0] - - -def get_test_efo_mock_committee_account(committee_id): - mock_committee_accounts = get_test_efo_mock_committee_accounts() - return next( - ( - committee - for committee in mock_committee_accounts - if committee["committee_id"] == committee_id - ), - None, - ) - - -def get_test_efo_mock_committee_accounts(): - mock_committee_accounts_file = "committee_accounts.json" - mock_committee_accounts_file_path = os.path.join( - settings.BASE_DIR, "openfec/test_efo_mock_data/", 
mock_committee_accounts_file - ) - with open(mock_committee_accounts_file_path) as fp: - return json.load(fp) diff --git a/django-backend/fecfiler/reports/form_3x/serializers.py b/django-backend/fecfiler/reports/form_3x/serializers.py index 05f407c6c8..bf45967b86 100644 --- a/django-backend/fecfiler/reports/form_3x/serializers.py +++ b/django-backend/fecfiler/reports/form_3x/serializers.py @@ -398,4 +398,4 @@ class Meta(ReportSerializer.Meta): + ["fields_to_validate", "is_first"] ) - read_only_fields = ["id", "deleted", "created", "updated", "is_first"] + read_only_fields = ["id", "created", "updated", "is_first"] diff --git a/django-backend/fecfiler/reports/form_3x/tests/test_serializers.py b/django-backend/fecfiler/reports/form_3x/tests/test_serializers.py index 478d0d3313..ba333f8ac3 100644 --- a/django-backend/fecfiler/reports/form_3x/tests/test_serializers.py +++ b/django-backend/fecfiler/reports/form_3x/tests/test_serializers.py @@ -5,12 +5,16 @@ ) from fecfiler.user.models import User from rest_framework.request import Request, HttpRequest +from fecfiler.reports.tests.utils import create_form3x +from fecfiler.committee_accounts.models import CommitteeAccount +from fecfiler.web_services.models import FECStatus, FECSubmissionState, UploadSubmission class F3XSerializerTestCase(TestCase): fixtures = ["C01234567_user_and_committee"] def setUp(self): + self.committee = CommitteeAccount.objects.create(committee_id="C00000000") self.valid_f3x_report = { "form_type": "F3XN", "treasurer_last_name": "Validlastname", @@ -66,3 +70,29 @@ def test_used_report_code(self): self.assertRaises( type(COVERAGE_DATE_REPORT_CODE_COLLISION), valid_serializer.save ) + + def test_get_status_mapping(self): + valid_serializer = Form3XSerializer( + data=self.valid_f3x_report, + context={"request": self.mock_request}, + ) + f3x_report = create_form3x(self.committee, "2024-01-01", "2024-02-01", {}) + valid_serializer.is_valid() + representation = valid_serializer.to_representation(f3x_report) + self.assertEquals(representation["report_status"], "In progress") + + f3x_report.upload_submission = UploadSubmission() + representation = valid_serializer.to_representation(f3x_report) + self.assertEquals(representation["report_status"], "Submission pending") + + f3x_report.upload_submission.fec_status = FECStatus.ACCEPTED + representation = valid_serializer.to_representation(f3x_report) + self.assertEquals(representation["report_status"], "Submission success") + + f3x_report.upload_submission.fec_status = FECSubmissionState.FAILED + representation = valid_serializer.to_representation(f3x_report) + self.assertEquals(representation["report_status"], "Submission failure") + + f3x_report.upload_submission.fec_status = FECStatus.REJECTED + representation = valid_serializer.to_representation(f3x_report) + self.assertEquals(representation["report_status"], "Submission failure") diff --git a/django-backend/fecfiler/reports/managers.py b/django-backend/fecfiler/reports/managers.py index 7066ab29e6..92297a829b 100644 --- a/django-backend/fecfiler/reports/managers.py +++ b/django-backend/fecfiler/reports/managers.py @@ -1,12 +1,11 @@ -from fecfiler.soft_delete.managers import SoftDeleteManager -from django.db.models import Case, When, Value, OuterRef, Exists +from django.db.models import Case, When, Value, OuterRef, Exists, Manager from enum import Enum """Manager to deterimine fields that are used the same way across reports, but are called different names""" -class ReportManager(SoftDeleteManager): +class ReportManager(Manager): 
def get_queryset(self): older_f3x = ( super() @@ -20,8 +19,6 @@ def get_queryset(self): queryset = ( super() .get_queryset() - .distinct() # Remove duplicates caused by multiple transaction - # foreign key links .annotate( report_type=Case( When(form_3x__isnull=False, then=ReportType.F3X.value), diff --git a/django-backend/fecfiler/reports/migrations/0007_remove_report_deleted.py b/django-backend/fecfiler/reports/migrations/0007_remove_report_deleted.py new file mode 100644 index 0000000000..e124f11a27 --- /dev/null +++ b/django-backend/fecfiler/reports/migrations/0007_remove_report_deleted.py @@ -0,0 +1,17 @@ +# Generated by Django 4.2.10 on 2024-04-05 15:04 + +from django.db import migrations + + +class Migration(migrations.Migration): + + dependencies = [ + ('reports', '0006_reporttransaction'), + ] + + operations = [ + migrations.RemoveField( + model_name='report', + name='deleted', + ), + ] diff --git a/django-backend/fecfiler/reports/models.py b/django-backend/fecfiler/reports/models.py index a7e92da374..7864eb755f 100644 --- a/django-backend/fecfiler/reports/models.py +++ b/django-backend/fecfiler/reports/models.py @@ -1,7 +1,6 @@ import uuid from django.db import models, transaction as db_transaction from django.db.models import Q -from fecfiler.soft_delete.models import SoftDeleteModel from fecfiler.committee_accounts.models import CommitteeOwnedModel from .managers import ReportManager from .form_3x.models import Form3X @@ -13,7 +12,7 @@ logger = structlog.get_logger(__name__) -class Report(SoftDeleteModel, CommitteeOwnedModel): +class Report(CommitteeOwnedModel): """Generated model from json schema""" id = models.UUIDField( @@ -128,6 +127,24 @@ def amend(self): self.upload_submission = None self.save() + def delete(self): + if not self.form_24: + """only delete transactions if the report is the source of the + tranaction""" + from fecfiler.transactions.models import Transaction + + Transaction.objects.filter(reports=self).delete() + + """delete report-transaction links""" + ReportTransaction.objects.filter(report=self).delete() + + for form_key in TABLE_TO_FORM: + form = getattr(self, form_key) + if form: + form.delete() + + super(CommitteeOwnedModel, self).delete() + TABLE_TO_FORM = { "form_3x": "F3X", diff --git a/django-backend/fecfiler/reports/serializers.py b/django-backend/fecfiler/reports/serializers.py index 7378b8f101..564fe313ad 100644 --- a/django-backend/fecfiler/reports/serializers.py +++ b/django-backend/fecfiler/reports/serializers.py @@ -1,4 +1,4 @@ -from .models import Report +from .models import Report, ReportTransaction from rest_framework.serializers import ( ModelSerializer, CharField, @@ -17,42 +17,33 @@ from fecfiler.reports.form_99.models import Form99 from fecfiler.reports.form_1m.models import Form1M from fecfiler.reports.form_1m.utils import add_form_1m_contact_fields +from django.db.models import OuterRef, Subquery, Exists, Q import structlog +from fecfiler.web_services.models import FECSubmissionState, FECStatus logger = structlog.get_logger(__name__) class Form3XSerializer(ModelSerializer): class Meta: - fields = [ - f.name - for f in Form3X._meta.get_fields() - if f.name not in ["deleted", "report"] - ] + fields = [f.name for f in Form3X._meta.get_fields() if f.name not in ["report"]] model = Form3X class Form24Serializer(ModelSerializer): class Meta: - fields = [ - f.name - for f in Form24._meta.get_fields() - if f.name not in ["deleted", "report"] - ] + fields = [f.name for f in Form24._meta.get_fields() if f.name not in ["report"]] model = 
Form24 class Form99Serializer(ModelSerializer): class Meta: - fields = [ - f.name - for f in Form99._meta.get_fields() - if f.name not in ["deleted", "report"] - ] + fields = [f.name for f in Form99._meta.get_fields() if f.name not in ["report"]] model = Form99 class Form1MSerializer(ModelSerializer): + contact_affiliated_id = UUIDField(allow_null=True, required=False) contact_candidate_I_id = UUIDField(allow_null=True, required=False) # noqa: N815 contact_candidate_II_id = UUIDField(allow_null=True, required=False) # noqa: N815 @@ -60,17 +51,26 @@ class Form1MSerializer(ModelSerializer): contact_candidate_IV_id = UUIDField(allow_null=True, required=False) # noqa: N815 contact_candidate_V_id = UUIDField(allow_null=True, required=False) # noqa: N815 contact_affiliated = ContactSerializer(allow_null=True, required=False) - contact_candidate_I = ContactSerializer(allow_null=True, required=False) # noqa: N815 - contact_candidate_II = ContactSerializer(allow_null=True, required=False) # noqa: N815,E501 - contact_candidate_III = ContactSerializer(allow_null=True, required=False) # noqa: N815,E501 - contact_candidate_IV = ContactSerializer(allow_null=True, required=False) # noqa: N815,E501 - contact_candidate_V = ContactSerializer(allow_null=True, required=False) # noqa: N815 + + contact_candidate_I = ContactSerializer( # noqa: N815 + allow_null=True, required=False + ) + contact_candidate_II = ContactSerializer( # noqa: N815 + allow_null=True, required=False + ) + contact_candidate_III = ContactSerializer( # noqa: N815 + allow_null=True, required=False + ) + contact_candidate_IV = ContactSerializer( # noqa: N815 + allow_null=True, required=False + ) + contact_candidate_V = ContactSerializer( # noqa: N815 + allow_null=True, required=False + ) class Meta: fields = [ - f.name - for f in Form1M._meta.get_fields() - if f.name not in ["deleted", "report"] + f.name for f in Form1M._meta.get_fields() if f.name not in ["report"] ] + [ "contact_affiliated_id", "contact_candidate_I_id", @@ -137,8 +137,59 @@ def to_representation(self, instance, depth=0): this_report = Report.objects.get(id=representation["id"]) representation["is_first"] = this_report.is_first if this_report else True + representation["report_status"] = self.get_status_mapping(instance) + representation["can_delete"] = self.can_delete(representation) + return representation + def can_delete(self, representation): + """can delete if there exist no transactions in this report + where any transactions in a different report back reference to them""" + no_check = ["F24", "F1M", "F99"] + return representation["report_status"] == "In progress" and ( + representation["report_type"] in no_check + or not ( + ReportTransaction.objects.filter( + Exists( + Subquery( + ReportTransaction.objects.filter( + ~Q(report_id=representation["id"]), + Q( + Q(transaction__id=OuterRef("transaction_id")) + | Q( + transaction__reatt_redes_id=OuterRef( + "transaction_id" + ) + ) + | Q( + transaction__parent_transaction_id=OuterRef( + "transaction_id" + ) + ) + | Q(transaction__debt_id=OuterRef("transaction_id")) + | Q(transaction__loan_id=OuterRef("transaction_id")) + ), + ) + ) + ), + report_id=representation["id"], + ).exists() + ) + ) + + def get_status_mapping(self, instance): + if instance.upload_submission is None: + return "In progress" + if instance.upload_submission.fec_status == FECStatus.ACCEPTED: + return "Submission success" + if ( + instance.upload_submission.fec_status == FECSubmissionState.FAILED + or instance.upload_submission.fec_status == 
FECStatus.REJECTED + ): + return "Submission failure" + + return "Submission pending" + def validate(self, data): self._context = self.context.copy() self._context["fields_to_ignore"] = self._context.get( @@ -155,7 +206,6 @@ def get_fields(): for f in Report._meta.get_fields() if f.name not in [ - "deleted", "uploadsubmission", "webprintsubmission", "committee_name", @@ -163,9 +213,9 @@ def get_fields(): "transaction", "dotfec", "report", - "reporttransaction" + "reporttransaction", ] ] + ["report_status", "fields_to_validate", "report_code_label", "is_first"] fields = get_fields() - read_only_fields = ["id", "deleted", "created", "updated", "is_first"] + read_only_fields = ["id", "created", "updated", "is_first"] diff --git a/django-backend/fecfiler/reports/signals.py b/django-backend/fecfiler/reports/signals.py index cdc5d85282..d1dfb63f01 100644 --- a/django-backend/fecfiler/reports/signals.py +++ b/django-backend/fecfiler/reports/signals.py @@ -19,8 +19,7 @@ def log_post_save(sender, instance, created, **kwargs): action = "updated" if created: action = "created" - elif instance.deleted: - action = "deleted" + logger.info(f"{instance.form_type} Report: {instance.id} was {action}") diff --git a/django-backend/fecfiler/reports/test_views.py b/django-backend/fecfiler/reports/test_views.py index 82a2a01860..6845a4fd0f 100644 --- a/django-backend/fecfiler/reports/test_views.py +++ b/django-backend/fecfiler/reports/test_views.py @@ -2,6 +2,9 @@ from django.test import RequestFactory, TestCase from fecfiler.reports.views import ReportViewSet from fecfiler.user.models import User +import structlog + +logger = structlog.get_logger(__name__) class CommitteeMemberViewSetTest(TestCase): diff --git a/django-backend/fecfiler/reports/tests/test_models.py b/django-backend/fecfiler/reports/tests/test_models.py index 0b9f1b7df0..3fa233bdc4 100644 --- a/django-backend/fecfiler/reports/tests/test_models.py +++ b/django-backend/fecfiler/reports/tests/test_models.py @@ -1,6 +1,14 @@ from django.test import TestCase from fecfiler.web_services.models import UploadSubmission -from fecfiler.reports.models import Report +from fecfiler.reports.models import Report, Form24, Form3X +from fecfiler.reports.tests.utils import create_form3x, create_form24 +from fecfiler.committee_accounts.models import CommitteeAccount +from fecfiler.transactions.tests.utils import create_ie +from fecfiler.contacts.models import Contact +from fecfiler.transactions.models import Transaction +import structlog + +logger = structlog.get_logger(__name__) class ReportModelTestCase(TestCase): @@ -8,6 +16,7 @@ class ReportModelTestCase(TestCase): def setUp(self): self.missing_type_transaction = {} + self.committee = CommitteeAccount.objects.create(committee_id="C00000000") def test_amending(self): f3x_report = Report.objects.get(id="b6d60d2d-d926-4e89-ad4b-c47d152a66ae") @@ -30,3 +39,34 @@ def test_amending_f24(self): f24_report.form_24.original_amendment_date, new_upload_submission.created ) self.assertEquals(f24_report.form_type, "F24A") + + def test_delete(self): + f24_report = create_form24(self.committee, "2024-01-01", "2024-02-01", {}) + f24_report_id = f24_report.id + f24_id = f24_report.form_24.id + f3x_report = create_form3x(self.committee, "2024-01-01", "2024-02-01", {}) + f3x_report_id = f3x_report.id + f3x_id = f3x_report.form_3x.id + candidate_a = Contact.objects.create( + committee_account_id=self.committee.id, + candidate_office="H", + candidate_state="MD", + candidate_district="99", + ) + ie = create_ie(self.committee, 
candidate_a, "2023-01-01", "123.45", "H2024") + ie.reports.set([f24_report_id, f3x_report_id]) + ie.save() + ie_id = ie.id + + f24_report.delete() + ie = Transaction.all_objects.filter(id=ie_id).first() + self.assertIsNone(ie.deleted) + self.assertFalse(Report.objects.filter(id=f24_report_id).exists()) + self.assertFalse(Form24.objects.filter(id=f24_id).exists()) + + f3x_report.delete() + self.assertFalse(Report.objects.filter(id=f3x_report_id).exists()) + self.assertFalse(Form3X.objects.filter(id=f3x_id).exists()) + + ie = Transaction.all_objects.filter(id=ie_id).first() + self.assertIsNotNone(ie.deleted) diff --git a/django-backend/fecfiler/reports/tests/utils.py b/django-backend/fecfiler/reports/tests/utils.py index 5d38234675..3fa420e9c7 100644 --- a/django-backend/fecfiler/reports/tests/utils.py +++ b/django-backend/fecfiler/reports/tests/utils.py @@ -10,6 +10,10 @@ def create_form3x(committee, coverage_from, coverage_through, data): return create_test_report(Form3X, committee, coverage_from, coverage_through, data) +def create_form24(committee, coverage_from, coverage_through, data): + return create_test_report(Form24, committee, coverage_from, coverage_through, data) + + def create_test_report( form, committee, coverage_from=None, coverage_through=None, data=None ): diff --git a/django-backend/fecfiler/reports/views.py b/django-backend/fecfiler/reports/views.py index 4f7d67a543..12e818be20 100644 --- a/django-backend/fecfiler/reports/views.py +++ b/django-backend/fecfiler/reports/views.py @@ -5,13 +5,13 @@ from fecfiler.committee_accounts.views import CommitteeOwnedViewMixin from .models import Report from fecfiler.transactions.models import Transaction -from fecfiler.web_services.models import FECSubmissionState, FECStatus from fecfiler.memo_text.models import MemoText from fecfiler.web_services.models import DotFEC, UploadSubmission, WebPrintSubmission from .serializers import ReportSerializer -from django.db.models import Case, Value, When, Q, CharField +from django.db.models import Case, Value, When import structlog + logger = structlog.get_logger(__name__) report_code_label_mapping = Case( @@ -43,23 +43,6 @@ ) -def get_status_mapping(): - """returns Django Case that determines report status based on upload submission""" - upload_exists = Q(upload_submission__isnull=False) - success = Q(upload_submission__fec_status=FECStatus.ACCEPTED) - failed = Q(upload_submission__fecfile_task_state=FECSubmissionState.FAILED) | Q( - upload_submission__fec_status=FECStatus.REJECTED - ) - - return Case( - When(success, then=Value("Submission success")), - When(failed, then=Value("Submission failure")), - When(upload_exists, then=Value("Submission pending")), - default=Value("In progress"), - output_field=CharField(), - ) - - class ReportViewSet(CommitteeOwnedViewMixin, ModelViewSet): """ This viewset automatically provides `list`, `create`, `retrieve`, @@ -70,11 +53,9 @@ class ReportViewSet(CommitteeOwnedViewMixin, ModelViewSet): in CommitteeOwnedViewMixin's implementation of get_queryset() """ - queryset = ( - Report.objects.annotate(report_code_label=report_code_label_mapping) - .annotate(report_status=get_status_mapping()) - .all() - ) + queryset = Report.objects.annotate( + report_code_label=report_code_label_mapping + ).all() serializer_class = ReportSerializer filter_backends = [filters.OrderingFilter] @@ -126,9 +107,10 @@ def hard_delete_reports(self, request): reports = Report.objects.filter(committee_account__committee_id=committee_id) report_count = reports.count() - transaction_count 
= Transaction.objects.filter( + transactions = Transaction.objects.filter( committee_account__committee_id=committee_id - ).count() + ) + transaction_count = transactions.count() memo_count = MemoText.objects.filter( report__committee_account__committee_id=committee_id ).count() @@ -148,7 +130,8 @@ def hard_delete_reports(self, request): logger.warn(f"Upload Submissions: {upload_submission_count}") logger.warn(f"WebPrint Submissions: {web_print_submission_count}") - reports.hard_delete() + reports.delete() + transactions.hard_delete() return Response(f"Deleted {report_count} Reports") def create(self, request): @@ -165,7 +148,6 @@ def partial_update(self, request, pk=None): def list(self, request, *args, **kwargs): queryset = self.filter_queryset(self.get_queryset()) - if "page" in request.query_params: page = self.paginate_queryset(queryset) if page is not None: diff --git a/django-backend/fecfiler/settings/base.py b/django-backend/fecfiler/settings/base.py index 949fd3ffc8..8617bb49f8 100644 --- a/django-backend/fecfiler/settings/base.py +++ b/django-backend/fecfiler/settings/base.py @@ -124,11 +124,6 @@ "default": dj_database_url.config() } -# Override default test name -DATABASES["default"]["TEST"] = { - "NAME": os.environ.get("FECFILE_TEST_DB_NAME", "postgres") -} - # Connection string for connecting directly DATABASE_URL = os.environ.get("DATABASE_URL") @@ -363,9 +358,6 @@ def get_env_logging_processors(log_format=LINE): FEC_API = env.get_credential("FEC_API") FEC_API_KEY = env.get_credential("FEC_API_KEY") FEC_API_COMMITTEE_LOOKUP_ENDPOINT = str(FEC_API) + "names/committees/" -FEC_API_COMMITTEE_LOOKUP_IDS_OVERRIDE = env.get_credential( - "FEC_API_COMMITTEE_LOOKUP_IDS_OVERRIDE" -) FEC_API_CANDIDATE_LOOKUP_ENDPOINT = str(FEC_API) + "candidates/" FEC_API_CANDIDATE_ENDPOINT = str(FEC_API) + "candidate/" diff --git a/django-backend/fecfiler/transactions/managers.py b/django-backend/fecfiler/transactions/managers.py index bc22744c31..ee9d119683 100644 --- a/django-backend/fecfiler/transactions/managers.py +++ b/django-backend/fecfiler/transactions/managers.py @@ -206,7 +206,9 @@ def LOAN_KEY_CLAUSE(self): # noqa: N802 When( schedule_c__isnull=False, then=Concat( - F("transaction_id"), F("schedule_c__report_coverage_through_date") + F("transaction_id"), + F("schedule_c__report_coverage_through_date"), + Value("LOAN"), ), ), default=None, @@ -217,9 +219,10 @@ def LOAN_PAYMENT_CLAUSE(self): # noqa: N802 return Case( When( schedule_c__isnull=False, - then=Window( - expression=Sum("effective_amount"), **self.loan_payment_window - ), + then=Coalesce(Window( + expression=Sum("effective_amount"), + **self.loan_payment_window + ), Value(Decimal(0))) ) ) diff --git a/django-backend/fecfiler/transactions/schedule_c/utils.py b/django-backend/fecfiler/transactions/schedule_c/utils.py index a350fd9f4a..14df72f09b 100644 --- a/django-backend/fecfiler/transactions/schedule_c/utils.py +++ b/django-backend/fecfiler/transactions/schedule_c/utils.py @@ -85,6 +85,7 @@ def carry_forward_loan(loan, report): "contact_3", "schedule_c", "loan", + "memo_text", ], ) ), diff --git a/django-backend/fecfiler/transactions/schedule_c2/utils.py b/django-backend/fecfiler/transactions/schedule_c2/utils.py index f89c5e5553..c9f053cc83 100644 --- a/django-backend/fecfiler/transactions/schedule_c2/utils.py +++ b/django-backend/fecfiler/transactions/schedule_c2/utils.py @@ -48,9 +48,6 @@ def carry_forward_guarantor(report, new_loan, guarantor): "contact_2_id": guarantor.contact_2_id, "contact_3_id": guarantor.contact_3_id, 
"schedule_c2": save_copy(guarantor.schedule_c2), - "memo_text": ( - save_copy(guarantor.memo_text) if guarantor.memo_text else None - ), "committee_account_id": new_loan.committee_account_id, "report_id": report.id, "parent_transaction_id": new_loan.id, diff --git a/django-backend/fecfiler/transactions/serializers.py b/django-backend/fecfiler/transactions/serializers.py index 61edd4d5fe..7584b30bbc 100644 --- a/django-backend/fecfiler/transactions/serializers.py +++ b/django-backend/fecfiler/transactions/serializers.py @@ -260,9 +260,9 @@ def to_representation(self, instance): representation["reports"] = [] representation["report_ids"] = [] for report in instance.reports.all(): - representation["reports"].append(ReportSerializer().to_representation( - report - )) + representation["reports"].append( + ReportSerializer().to_representation(report) + ) representation["report_ids"].append(report.id) return representation diff --git a/django-backend/fecfiler/transactions/signals.py b/django-backend/fecfiler/transactions/signals.py index 01a88fb45b..b0e57c45b9 100644 --- a/django-backend/fecfiler/transactions/signals.py +++ b/django-backend/fecfiler/transactions/signals.py @@ -6,6 +6,7 @@ We use signals to log saves to be consistent with delete logging """ + from django.db.models.signals import post_save, post_delete from django.dispatch import receiver from .models import Transaction diff --git a/django-backend/fecfiler/web_services/models.py b/django-backend/fecfiler/web_services/models.py index f18977094c..456034fd9c 100644 --- a/django-backend/fecfiler/web_services/models.py +++ b/django-backend/fecfiler/web_services/models.py @@ -27,7 +27,7 @@ class Meta: db_table = "dot_fecs" -class FECSubmissionState(Enum): +class FECSubmissionState(str, Enum): """States of submission to FEC Can be used for Webload and WebPrint""" @@ -41,7 +41,7 @@ def __str__(self): return str(self.value) -class FECStatus(Enum): +class FECStatus(str, Enum): ACCEPTED = "ACCEPTED" # Webload COMPLETED = "COMPLETED" # WebPrint PROCESSING = "PROCESSING" diff --git a/docker-compose.yml b/docker-compose.yml index 1c2e94f5bb..2ec2012d10 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -1,9 +1,9 @@ -version: '3' +version: "3" services: db: build: - context: './db' + context: "./db" dockerfile: ${DB_DOCKERFILE:-Dockerfile} args: ENCRYPTION_PASSWORD: ${ENCRYPTION_PASSWORD} @@ -22,13 +22,13 @@ services: redis: image: redis:6.2-alpine ports: - - '6379:6379' + - "6379:6379" command: redis-server api-worker: build: - context: './' - dockerfile: '${WORKER_DOCKERFILE:-Worker_Dockerfile}' + context: "./" + dockerfile: "${WORKER_DOCKERFILE:-Worker_Dockerfile}" image: fecfile-celery-worker container_name: fecfile-celery-worker volumes: @@ -63,8 +63,8 @@ services: api: build: - context: './' - dockerfile: '${API_DOCKERFILE:-Dockerfile}' + context: "./" + dockerfile: "${API_DOCKERFILE:-Dockerfile}" image: fecfile-api container_name: fecfile-api volumes: @@ -105,17 +105,16 @@ services: OUTPUT_TEST_INFO_IN_DOT_FEC: FEC_API: FEC_API_KEY: - FEC_API_COMMITTEE_LOOKUP_IDS_OVERRIDE: LOG_FORMAT: MOCK_OPENFEC: REDIS locust-leader: image: locustio/locust ports: - - "8089:8089" + - "8089:8089" volumes: - ./:/mnt/locust - command: -f /mnt/locust/locustfile.py --master -H http://fecfile-api:8080 + command: -f /mnt/locust/locust-testing/locust_run.py --master -H http://fecfile-api:8080 profiles: [locust] environment: LOCAL_TEST_USER: @@ -126,7 +125,7 @@ services: image: locustio/locust volumes: - ./:/mnt/locust - command: -f 
/mnt/locust/locustfile.py --worker --master-host locust-leader -L DEBUG + command: -f /mnt/locust/locust-testing/locust_run.py --worker --master-host locust-leader -L DEBUG profiles: [locust] environment: LOCAL_TEST_USER: diff --git a/locust-testing/README.md new file mode 100644 index 0000000000..51f9628c14 --- /dev/null +++ b/locust-testing/README.md @@ -0,0 +1,104 @@ +# Locust Testing + +Locust testing is used to simulate swarms of users making requests to an API service, allowing +the testing of API performance under load. Locust tests are first set up by spinning up a +docker container. The user can then visit http://localhost:8089/ to start a testing session. + +The instructions for running tests with Locust follow: + +## (Optional) Prepare testing data + +If you want a measure of consistency when testing with locust, you can pre-generate Contacts, +Reports, and Transactions for use in Locust testing. These will be stored in .json files in +the locust-data subdirectory, with separate files for each resource (e.g., `contacts.locust.json`). +If the script finds .json files, locust testing will preferentially pull resources from them +before creating additional resources randomly as needed. Inter-resource links (such as the +`contact_id` and `report_id` fields on a transaction) are not pre-generated and are instead +determined randomly at run-time. + +You can generate these .json files by running `python locust-testing/locust_data_generator.py`. +Run the script with the `-h` flag for additional information. + +## Setup - Environment variables + +- `LOCAL_TEST_USER` + - Committee ID concatenated with email +- `LOCAL_TEST_PWD` + - The password corresponding to the user data in the preceding variable +- `LOCUST_WANTED_REPORTS` (Optional. Default: 10) + - Determines the number of Report records stored in the API before running locust tasks +- `LOCUST_WANTED_CONTACTS` (Optional. Default: 100) + - Determines the number of Contact records stored in the API before running locust tasks +- `LOCUST_WANTED_TRANSACTIONS` (Optional. Default: 500) + - Determines the number of Transaction records stored in the API before running locust tasks +- `LOCUST_TRANSACTIONS_SINGLE_TO_TRIPLE_RATIO` (Optional. Default: 9 / 10) + - Determines the proportion of transactions that will be created as single transactions + (having no children) vs. triple transactions (with child and grandchild transactions) + + +## Setup - Additional steps for remote testing + +1. Set an additional environment variable: +- `OIDC_SESSION_ID` + - Used to log locust followers into the remote testing environment as part of testing + - You can get the value for this by logging into the desired testing environment with cloud.gov + and retrieving the session ID from any subsequent request header. + +2. Set the target API service for testing in [docker-compose.yml](https://github.com/fecgov/fecfile-web-api/blob/develop/docker-compose.yml#L118): +- As an example, this is what you would set in order to target DEV: + - `-f /mnt/locust/locust_run.py --master -H https://fecfile-web-api-dev.app.cloud.gov` + +## Running Tests + +1. Run the command `docker-compose --profile locust up` to spin up the testing environment +- (Optional) Scale up using docker by adding `--scale locust-follower=4` to the end + +2. Go to http://localhost:8089/ in your browser of choice to run tests.
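+For example (assuming a local run; the record counts below are illustrative, not required values), a
+testing session could be prepared and started like this:
+
+```sh
+# Optional: pre-generate reusable test data files in locust-testing/locust-data/
+python locust-testing/locust_data_generator.py --form-3x 10 --contacts 100 --single-transactions 450 --triple-transactions 50
+
+# Spin up the Locust leader and four followers, then open http://localhost:8089/
+docker-compose --profile locust up --scale locust-follower=4
+```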
+ +### Recommended tests: +- 5 users / 1 ramp-up rate +- 100 users / 1 ramp-up rate +- 500 users / 5 ramp-up rate + +Advanced settings: Run time 5m + +# How our Locust Testing works under the hood + +When you start a test run from your browser, the testing that follows takes place in two phases: +Setup and Tasks. Setup runs once at the start, and then Tasks run on loop for the duration of the +testing (if specified). + +## Setup + +The Setup phase has two jobs: logging in to the target API and preparing data before testing. + +### Log In + +Logging in locally works with a simple request to the API using the legacy debug login. When testing +against a remote service, Locust doesn't actually log in. Instead, Locust uses the session ID stored +in the `OIDC_SESSION_ID` env variable, using a session that you manually created rather than automating +the login process. This is needed because Login.gov uses two-factor authentication that cannot be +automated at this time. + +### Preparing Data + +Locust (or more accurately *our* Locust setup) needs a bevy of Contacts, Reports, and Transactions to +load test with. Locust wants a specific number of Contact, Report, and Transaction records on the API +prior to beginning the Tasks phase. These numbers can optionally be controlled with environment variables +specified above. + +Once Locust knows how many records it wants, it makes requests against the API to determine the number of +records already present on the server (for the logged in committee). If there aren't as many records as it +wants, Locust will make requests to create new records. The data for these additional records will be taken +from .json files in the "/locust-data" subdirectory in order of appearance within each file. If no .json +file is found or if not enough records are provided, the remaining records will be generated at random. +Each additional record is then submitted to the API. + +## Tasks + +The Task phase consists of swarm followers running functions tagged with the `@task` decorator on loop for +the duration of the testing session. There are (as of writing) four tasks: +- Celery Test +- Load Contacts +- Load Reports +- Load Transactions \ No newline at end of file diff --git a/locust-testing/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/locust-testing/locust-data/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/locust-testing/locust_data_generator.py new file mode 100644 index 0000000000..4253925357 --- /dev/null +++ b/locust-testing/locust_data_generator.py @@ -0,0 +1,221 @@ +from random import choice, randrange +import json +from os import path + + +def generate_form_3x(count=1, collision_maximum=1000): + reports_and_dates = [ + ["Q1", "01-01", "03-31"], + ["Q2", "04-01", "06-30"], + ["Q3", "07-01", "09-30"], + ["Q4", "10-01", "12-31"] + ] + form_3x_list = [] + dates_taken = set() + collision_count = 0 + + while len(form_3x_list) < count and collision_count < collision_maximum: + quarter, from_date, through_date = choice(reports_and_dates) + year = randrange(1000, 9999) + hashable_date = f"{year}, {quarter}" + if (hashable_date in dates_taken): + collision_count += 1 + continue + + dates_taken.add(hashable_date) + alert_text = "Are you sure you want to submit this form \ + electronically? Please note that you cannot undo this action. \ + Any changes needed will need to be filed as an amended report."
+ + form_3x_list.append( + { + "hasChangeOfAddress": "true", + "submitAlertText": alert_text, + "report_type": "F3X", + "form_type": "F3XN", + "report_code": quarter, + "date_of_election": None, + "state_of_election": None, + "coverage_from_date": f"{year}-{from_date}", + "coverage_through_date": f"{year}-{through_date}", + } + ) + + return form_3x_list + + +def generate_contacts(count=1): + street_names = ["Main", "Test", "Place", "Home", "Domain", "Victory", "Word"] + street_types = ["St", "Dr", "Ln", "Way"] + last_names = ["Testerson", "Smith", "Worker", "Menace", "Wonder"] + first_names = ["Bill", "George", "Madeline", "May", "Alex"] + prefixes = ["Mr", "Mrs", "Ms", "Mz", "Sir", None] + suffixes = ["I", "II", "III", "IV", "V", None] + + contacts = [] + for _ in range(count): + street_1 = f"{randrange(1, 999)} {choice(street_names)} {choice(street_types)}" + contacts.append({ + "type": "IND", + "street_1": street_1, + "city": "Testville", + "state": "AK", + "zip": "12345", + "country": "USA", + "last_name": choice(last_names), + "first_name": choice(first_names), + "middle_name": choice(first_names), + "prefix": choice(prefixes), + "suffix": choice(suffixes), + "street_2": None, + "telephone": None, + "employer": "Business Inc.", + "occupation": "Job" + }) + + return contacts + + +def generate_single_transactions(count=1, contacts=None, report_ids=None): + transactions = [] + for _ in range(count): + contact = choice(contacts) if contacts else generate_contacts()[0] + new_transaction = { + "children": [], + "form_type": "SA11AI", + "transaction_type_identifier": "INDIVIDUAL_RECEIPT", + "aggregation_group": "GENERAL", + "schema_name": "INDIVIDUAL_RECEIPT", + "report_ids": [choice(report_ids)] if report_ids else None, + "entity_type": "IND", + "contributor_last_name": contact["last_name"], + "contributor_first_name": contact["first_name"], + "contributor_middle_name": contact["middle_name"], + "contributor_prefix": contact["prefix"], + "contributor_suffix": contact["suffix"], + "contributor_street_1": contact["street_1"], + "contributor_street_2": None, + "contributor_city": contact["city"], + "contributor_state": contact["state"], + "contributor_zip": contact["zip"], + "contribution_date": "2024-02-01", + "contribution_amount": randrange(25, 10000), + "contribution_purpose_descrip": None, + "contributor_employer": contact["employer"], + "contributor_occupation": contact["occupation"], + "memo_code": None, + "contact_1": contact, + "contact_1_id": contact.get("id", None), + "schedule_id": "A" + } + + transactions.append(new_transaction) + + return transactions + + +def generate_triple_transactions(count=1, contacts=None, report_ids=None): + triple_transactions = [] + for _ in range(count): + a, b, c = generate_single_transactions(3, contacts, report_ids) + b["children"].append(c) + a["children"].append(b) + triple_transactions.append(a) + + return triple_transactions + + +def save_json(values, name="data"): + directory = path.dirname(path.abspath(__file__)) + filename = f"{name}.locust.json" + full_filename = path.join(directory, "locust-data", filename) + file = open(full_filename, "w") + file.write(json.dumps(values)) + file.close() + + +class LocustDataGenerator: + form3x_count = 0 + form3x_list = [] + contact_count = 0 + contact_list = [] + single_transaction_count = 0 + single_transaction_list = [] + triple_transaction_count = 0 + triple_transaction_list = [] + + def __init__(self, args): + try: + self.form3x_count = int(args.form_3x) + self.contact_count = int(args.contacts) + 
+            self.single_transaction_count = int(args.single_transactions)
+            self.triple_transaction_count = int(args.triple_transactions)
+        except ValueError:
+            print("Non-integer value passed as argument")
+
+        if sum([
+            self.form3x_count,
+            self.contact_count,
+            self.single_transaction_count,
+            self.triple_transaction_count
+        ]) == 0:
+            print("No arguments provided. Run with --help or -h for instructions")
+
+    def build(self):
+        if self.form3x_count > 0:
+            self.form3x_list = generate_form_3x(self.form3x_count)
+        if self.contact_count > 0:
+            self.contact_list = generate_contacts(self.contact_count)
+        if self.single_transaction_count > 0:
+            self.single_transaction_list = generate_single_transactions(
+                self.single_transaction_count,
+                self.contact_list
+            )
+        if self.triple_transaction_count > 0:
+            self.triple_transaction_list = generate_triple_transactions(
+                self.triple_transaction_count,
+                self.contact_list
+            )
+
+    def dump(self):
+        if self.form3x_count > 0:
+            save_json(self.form3x_list, "form-3x")
+        if self.contact_count > 0:
+            save_json(self.contact_list, "contacts")
+        if self.single_transaction_count > 0:
+            save_json(self.single_transaction_list, "single-transactions")
+        if self.triple_transaction_count > 0:
+            save_json(self.triple_transaction_list, "triple-transactions")
+
+
+if __name__ == "__main__":
+    import argparse
+
+    parser = argparse.ArgumentParser(
+        description="This script generates json test data for use in locust testing"
+    )
+    parser.add_argument(
+        "--form-3x",
+        default=0,
+        help="The number of form-3x reports to be defined",
+    )
+    parser.add_argument(
+        "--contacts",
+        default=0,
+        help="The number of contacts to be defined",
+    )
+    parser.add_argument(
+        "--single-transactions",
+        default=0,
+        help="The number of transactions to be created without any children",
+    )
+    parser.add_argument(
+        "--triple-transactions",
+        default=0,
+        help="The number of transactions to be created with children and grandchildren",
+    )
+    args = parser.parse_args()
+
+    generator = LocustDataGenerator(args)
+    generator.build()
+    generator.dump()
diff --git a/locust-testing/locust_run.py b/locust-testing/locust_run.py
new file mode 100644
index 0000000000..0a58432a4b
--- /dev/null
+++ b/locust-testing/locust_run.py
@@ -0,0 +1,314 @@
+import os
+import resource
+import logging
+import random
+import json
+import math
+
+from locust import between, task, TaskSet, user
+import locust_data_generator
+
+
+TEST_USER = os.environ.get("LOCAL_TEST_USER")
+TEST_PWD = os.environ.get("LOCAL_TEST_PWD")
+SESSION_ID = os.environ.get("OIDC_SESSION_ID")
+
+TIMEOUT = 30  # seconds
+
+# item counts wanted before the task phase; environment values arrive as strings,
+# so coerce them to numbers before they are compared against API counts
+WANTED_REPORTS = int(os.environ.get("LOCUST_WANTED_REPORTS", 10))
+WANTED_CONTACTS = int(os.environ.get("LOCUST_WANTED_CONTACTS", 100))
+WANTED_TRANSACTIONS = int(os.environ.get("LOCUST_WANTED_TRANSACTIONS", 500))
+SINGLE_TO_TRIPLE_RATIO = float(os.environ.get(
+    "LOCUST_TRANSACTIONS_SINGLE_TO_TRIPLE_RATIO",
+    9 / 10
+))
+
+SCHEDULES = ["A"]  # Further schedules to be implemented in the future
+
+# Avoid "Too many open files" error
+resource.setrlimit(resource.RLIMIT_NOFILE, (10000, 999999))
+
+
+def get_json_data(name):
+    directory = os.path.dirname(os.path.abspath(__file__))
+    filename = f"{name}.locust.json"
+    full_filename = os.path.join(directory, "locust-data", filename)
+    if os.path.isfile(full_filename):
+        try:
+            file = open(full_filename, "r")
+            values = json.loads(file.read())
+            file.close()
+            logging.info(f"Retrieved {len(values)} items from {filename}")
+            return values
+        except (IOError, ValueError):
+            logging.error(f"Unable to
retrieve locust data from file {filename}") + + return [] + + +class Tasks(TaskSet): + report_ids = [] + contacts = [] + + def on_start(self): + if "cloud.gov" in self.client.base_url: + self.client.headers = { + "cookie": f"sessionid={SESSION_ID};", + "user-agent": "Locust testing", + } + self.report_ids = self.fetch_values("reports", "id") + self.contacts = self.scrape_endpoint("contacts") + else: + login_response = self.client.post( + "/api/v1/user/login/authenticate", + json={"username": TEST_USER, "password": TEST_PWD} + ) + csrftoken = login_response.cookies.get('csrftoken') + self.client.headers = { + "X-CSRFToken": csrftoken + } + committees = self.fetch_values("committees", "id") + committee_uuid = committees[0] + print("committee_uuid", committee_uuid) + activate_response = self.client.post( + f"/api/v1/committees/{committee_uuid}/activate/" + ) + print("activate_response.status_code", activate_response.status_code) + report_count = self.fetch_count("reports") + contact_count = self.fetch_count("contacts") + transaction_count = self.fetch_count("transactions") + if report_count < WANTED_REPORTS: + logging.info("Not enough reports, creating some") + self.create_reports(WANTED_REPORTS - report_count) + if contact_count < WANTED_CONTACTS: + logging.info("Not enough contacts, creating some") + self.create_contacts(WANTED_CONTACTS - contact_count) + + self.report_ids = self.fetch_values("reports", "id") + logging.info(f"Report ids {self.report_ids}") + self.contacts = self.scrape_endpoint("contacts") + if transaction_count < WANTED_TRANSACTIONS: + logging.info("Not enough transactions, creating some") + difference = WANTED_TRANSACTIONS - transaction_count + singles_needed = math.ceil(difference * SINGLE_TO_TRIPLE_RATIO) + triples_needed = math.ceil(difference * (1 - SINGLE_TO_TRIPLE_RATIO)) + self.create_single_transactions(singles_needed) + self.create_triple_transactions(triples_needed) + + def create_reports(self, count=1): + fields_to_validate = [ + "filing_frequency", + "report_type_category", + "report_code", + "coverage_from_date", + "coverage_through_date", + "date_of_election", + "state_of_election", + "form_type" + ] + params = { + "fields_to_validate": fields_to_validate + } + + reports = get_json_data("form-3x") + if len(reports) < count: + reports = locust_data_generator.generate_form_3x(count - len(reports)) + + for report in reports[:count]: + self.client.post( + "/api/v1/reports/form-3x/", + name="create_report", + # TODO: does it make sense to pass both the params and json here? + params=params, + json=report + ) + + def create_contacts(self, count=1): + contacts = get_json_data("contacts") + if len(contacts) < count: + contacts += locust_data_generator.generate_contacts(count - len(contacts)) + + for contact in contacts[:count]: + self.client.post( + "/api/v1/contacts/", + name="create_contacts", + # TODO: does it make sense to pass both the params and json here? 
+                # Same with create_reports
+                json=contact,
+                timeout=TIMEOUT
+            )
+
+    def create_single_transactions(self, count=1):
+        transactions = get_json_data("single-transactions")
+        self.patch_prebuilt_transactions(transactions)
+        if len(transactions) < count:
+            difference = count - len(transactions)
+            transactions += locust_data_generator.generate_single_transactions(
+                difference,
+                self.contacts,
+                self.report_ids
+            )
+
+        for transaction in transactions[:count]:
+            self.create_transaction(transaction)
+
+    def create_triple_transactions(self, count=1):
+        transactions = get_json_data("triple-transactions")
+        self.patch_prebuilt_transactions(transactions)
+        if len(transactions) < count:
+            difference = count - len(transactions)
+            transactions += locust_data_generator.generate_triple_transactions(
+                difference,
+                self.contacts,
+                self.report_ids
+            )
+
+        for transaction in transactions[:count]:
+            self.create_transaction(transaction)
+
+    def patch_prebuilt_transactions(self, transactions):
+        for t in transactions:
+            report_id = random.choice(self.report_ids)
+
+            t["report_ids"] = [report_id]
+            t["contact_1_id"] = random.choice(self.contacts)["id"]
+
+            for child in t["children"]:
+                child["report_ids"] = [report_id]
+                child["contact_1_id"] = random.choice(self.contacts)["id"]
+                for grandchild in child["children"]:
+                    grandchild["report_ids"] = [report_id]
+                    grandchild["contact_1_id"] = random.choice(self.contacts)["id"]
+
+    def create_transaction(self, transaction):
+        fields_to_validate = [
+            "form_type",
+            "transaction_type_identifier",
+            "entity_type",
+            "contributor_last_name",
+            "contributor_first_name",
+            "contributor_middle_name",
+            "contributor_prefix",
+            "contributor_suffix",
+            "contributor_street_1",
+            "contributor_street_2",
+            "contributor_city",
+            "contributor_state",
+            "contributor_zip",
+            "contribution_date",
+            "contribution_amount",
+            "contribution_aggregate",
+            "aggregation_group",
+            "contribution_purpose_descrip",
+            "contributor_employer",
+            "contributor_occupation",
+            "memo_code",
+            "memo_text_description",
+            "reattribution_redesignation_tag"
+        ]
+        params = {
+            "fields_to_validate": fields_to_validate
+        }
+        self.client.post(
+            "/api/v1/transactions/",
+            name="create_transactions",
+            params=params,
+            json=transaction,
+            timeout=TIMEOUT
+        )
+
+    def fetch_count(self, endpoint):
+        response = self.get_page(endpoint)
+        return response.json()["count"]
+
+    def fetch_values(self, endpoint, key):
+        values = []
+        results = self.scrape_endpoint(endpoint)
+        for result in results:
+            value = result.get(key, None)
+            if value is not None:
+                values.append(value)
+
+        return values
+
+    def scrape_endpoint(self, endpoint):
+        # Collect the results from every page of the paginated endpoint,
+        # following the "next" link until the last page has been read
+        results = []
+        page = 1
+        response = self.get_page(endpoint)
+        while response.status_code == 200:
+            results += response.json().get("results", [])
+            if response.json()["next"] is None:
+                break
+            page += 1
+            response = self.get_page(endpoint, page=page)
+
+        return results
+
+    def get_page(self, endpoint, page=1):
+        params = {
+            "page": page,
+            "ordering": "form_type",
+        }
+        return self.client.get(
+            f"/api/v1/{endpoint}",
+            params=params,
+            name=f"preload_{endpoint}_ids"
+        )
+
+    @task
+    def celery_test(self):
+        self.client.get(
+            "/celery-test/",
+            name="celery-test",
+            timeout=TIMEOUT
+        )
+
+    @task
+    def load_contacts(self):
+        params = {
+            "page": 1,
+            "ordering": "form_type",
+        }
+        self.client.get(
+            "/api/v1/contacts/",
+            name="load_contacts",
+            timeout=TIMEOUT,
+            params=params
+        )
+
+    @task
+    def
load_reports(self): + params = { + "page": 1, + "ordering": "form_type", + } + self.client.get( + "/api/v1/reports/", + name="load_reports", + timeout=TIMEOUT, + params=params + ) + + @task + def load_transactions(self): + report_id = random.choice(self.report_ids) + schedules = random.choice(SCHEDULES) + params = { + "page": 1, + "ordering": "form_type", + "schedules": schedules, + "report_id": report_id, + } + self.client.get( + "/api/v1/transactions/", + name="load_transactions", + timeout=TIMEOUT, + params=params + ) + + +class Swarm(user.HttpUser): + tasks = [Tasks] + wait_time = between(1, 5) diff --git a/locustfile.py b/locustfile.py deleted file mode 100644 index 9d2e3ee037..0000000000 --- a/locustfile.py +++ /dev/null @@ -1,373 +0,0 @@ -"""Load testing for the FECFile API and web app. Run from command directory using the - -*Run tests locally:* -Environment variables: -Ask team about what to set for -`LOCAL_TEST_USER` and `LOCAL_TEST_PWD` - -`docker-compose --profile locust up -d` - -Go to http://0.0.0.0:8089/ to run tests. - -Recommended tests: -5 users / 1 ramp-up rate -100 users / 1 ramp-up rate -500 users / 5 ramp-up rate - -Advanced settings: Run time 5m - -*Run tests on other spaces:* -Log in to that environment, and get the session ID from the header and update the -OIDC_SESSION_ID environment variable on your local machine - -Modifying docker-compose: --f /mnt/locust/locustfile.py --master -H https://fecfile-web-api-dev.app.cloud.gov - -Scale up using docker: -docker-compose --profile locust up -d --scale locust-follower=4 - -Go to http://0.0.0.0:8089/ to run tests. - -""" - -import os -import resource -import logging -import random - -from locust import between, task, TaskSet, user - -TEST_USER = os.environ.get("LOCAL_TEST_USER") -TEST_PWD = os.environ.get("LOCAL_TEST_PWD") -SESSION_ID = os.environ.get("OIDC_SESSION_ID") - -SCHEDULES = ["A", "B,E", "C,D"] -REPORTS_AND_DATES = [ - { - "report_code": "Q1", - "coverage_from_date": "{}-01-01", - "coverage_through_date": "{}-03-31", - }, - { - "report_code": "Q2", - "coverage_from_date": "{}-04-01", - "coverage_through_date": "{}-06-30", - }, - { - "report_code": "Q3", - "coverage_from_date": "{}-07-01", - "coverage_through_date": "{}-09-30", - }, - { - "report_code": "Q4", - "coverage_from_date": "{}-10-01", - "coverage_through_date": "{}-12-31", - } -] - -# seconds -timeout = 30 # seconds - -# Avoid "Too many open files" error -resource.setrlimit(resource.RLIMIT_NOFILE, (10000, 999999)) - - -def generate_random_report(): - """This isn't that many different combinations - could still have some clashes""" - report = random.choice(REPORTS_AND_DATES) - year = random.choice(range(1800, 4040)) - report["coverage_from_date"] = report["coverage_from_date"].format(year) - report["coverage_through_date"] = report["coverage_through_date"].format(year) - return report - - -class Tasks(TaskSet): - - def on_start(self): - if "cloud.gov" in self.client.base_url: - self.client.headers = { - "cookie": f"sessionid={SESSION_ID};", - "user-agent": "Locust testing", - } - else: - login_response = self.client.post( - "/api/v1/user/login/authenticate", - json={"username": TEST_USER, "password": TEST_PWD} - ) - csrftoken = login_response.cookies.get('csrftoken') - self.client.headers = { - "X-CSRFToken": csrftoken - } - committees = self.fetch_ids("committees", "id") - committee_uuid = committees[0] - print("committee_uuid", committee_uuid) - activate_response = self.client.post( - f"/api/v1/committees/{committee_uuid}/activate/" - ) - 
print("activate_response.status_code", activate_response.status_code) - print('self.fetch_ids("reports", "id")') - print(self.fetch_ids("reports", "id")) - self.reports = self.fetch_ids("reports", "id") - self.contacts = self.fetch_ids("contacts", "id") - if len(self.reports) < 10: - logging.info("Not enough reports, creating some") - self.create_report() - if len(self.contacts) < 10: - logging.info("Not enough contacts, creating some") - self.create_contact() - if len(self.fetch_ids("transactions", "id")) < 10: - logging.info("Not enough transactions, creating some") - self.create_transaction() - - self.reports = self.fetch_ids("reports", "id") - self.contacts = self.fetch_ids("contacts", "id") - - def create_report(self): - report = generate_random_report() - fields_to_validate = [ - "filing_frequency", - "report_type_category", - "report_code", - "coverage_from_date", - "coverage_through_date", - "date_of_election", - "state_of_election", - "form_type" - ] - params = { - "fields_to_validate": fields_to_validate - } - json = { - "hasChangeOfAddress": "true", - "submitAlertText": "Are you sure you want to submit this form \ - electronically? Please note that you cannot undo this action. \ - Any changes needed will need to be filed as an amended report.", - "report_type": "F3X", - "form_type": "F3XN", - "report_code": report.get("report_code"), - "date_of_election": None, - "state_of_election": None, - "coverage_from_date": report.get("coverage_from_date"), - "coverage_through_date": report.get("coverage_through_date") - } - self.client.post( - "/api/v1/reports/form-3x/", - name="create_report", - # TODO: does it make sense to pass both the params and json here? - params=params, - json=json - ) - - def create_contact(self): - json = { - "type": "IND", - "street_1": "123 Main St", - "city": "Washington", - "state": "AL", - "zip": "20000", - "country": "USA", - "last_name": "Business", - "first_name": "Mrs", - "middle_name": None, - "prefix": None, - "suffix": None, - "street_2": None, - "telephone": None, - "employer": None, - "occupation": None - } - self.client.post( - "/api/v1/contacts/", - name="create_contacts", - # TODO: does it make sense to pass both the params and json here? 
- # Same with create_reports - json=json, - timeout=timeout - ) - - def create_transaction(self): - contact_id = random.choice(self.contacts) - fields_to_validate = [ - "form_type", - "transaction_type_identifier", - "entity_type", - "contributor_last_name", - "contributor_first_name", - "contributor_middle_name", - "contributor_prefix", - "contributor_suffix", - "contributor_street_1", - "contributor_street_2", - "contributor_city", - "contributor_state", - "contributor_zip", - "contribution_date", - "contribution_amount", - "contribution_aggregate", - "aggregation_group", - "contribution_purpose_descrip", - "contributor_employer", - "contributor_occupation", - "memo_code", - "memo_text_description", - "reattribution_redesignation_tag" - ] - params = { - "fields_to_validate": fields_to_validate - } - json = { - "children": [], - "form_type": "SA11AI", - "transaction_type_identifier": "INDIVIDUAL_RECEIPT", - "aggregation_group": "GENERAL", - "contact_1_id": contact_id, - "schema_name": "INDIVIDUAL_RECEIPT", - # "fields_to_validate": - "report_id": random.choice(self.reports), - # "contact_1": { - # "type": "IND", - # "street_1": "11 A St NW Apt 3", - # "city": "Washington", - # "state": "AL", - # "zip": "20000", - # "country": "USA", - # "deleted": None, - # "committee_account_id": "9fa4aa10-b993-4bbf-8eee-e7973c9d87b8", - # "id": contact_id, - # "candidate_id": None, - # "committee_id": None, - # "name": None, - # "last_name": "Business", - # "first_name": "Nunya", - # "middle_name": None, - # "prefix": None, - # "suffix": None, - # "street_2": None, - # "employer": "Business", - # "occupation": "Business", - # "candidate_office": None, - # "candidate_state": None, - # "candidate_district": None, - # "telephone": None, - # "created": "2024-02-08T19:05:15.925Z", - # "updated": "2024-02-08T19:05:15.925Z", - # "transaction_count": 9, - # "full_name_fwd": "nunya business", - # "full_name_bwd": "business nunya" - # }, - "entity_type": "IND", - "contributor_last_name": "Business", - "contributor_first_name": "Nunya", - "contributor_middle_name": None, - "contributor_prefix": None, - "contributor_suffix": None, - "contributor_street_1": "11 A St NW", - "contributor_street_2": None, - "contributor_city": "Washington", - "contributor_state": "AL", - "contributor_zip": "20000", - "contribution_date": "2024-02-01", - "contribution_amount": 1234, - "contribution_aggregate": 102200, - "contribution_purpose_descrip": None, - "contributor_employer": "Business", - "contributor_occupation": "Business", - "memo_code": None, - "date": None, - "amount": None, - "purpose_description": None, - "text4000": None, - "street_1": None, - "street_2": None, - "city": None, - "state": None, - "zip": None, - "aggregate": None, - "last_name": None, - "first_name": None, - "middle_name": None, - "prefix": None, - "suffix": None, - "employer": None, - "occupation": None, - "schedule_id": "A" - } - self.client.post( - "/api/v1/transactions/", - name="create_transactions", - params=params, - json=json, - timeout=timeout - ) - - def fetch_ids(self, endpoint, key): - params = { - "page": 1, - "ordering": "form_type", - } - response = self.client.get( - f"/api/v1/{endpoint}", - params=params, - name=f"preload_{endpoint}_ids" - ) - print("response.json()", response.json()) - if response.status_code == 200: - return [result.get(key) for result in response.json()["results"]] - else: - logging.error(f"{response.status_code} error fetching pre-load id") - - @task - def celery_test(self): - self.client.get( - "/celery-test/", - 
name="celery-test", - timeout=timeout - ) - - @task - def load_contacts(self): - params = { - "page": 1, - "ordering": "form_type", - } - self.client.get( - "/api/v1/contacts/", - name="load_contacts", - timeout=timeout, - params=params - ) - - @task - def load_reports(self): - params = { - "page": 1, - "ordering": "form_type", - } - self.client.get( - "/api/v1/reports/", - name="load_reports", - timeout=timeout, - params=params - ) - - @task - def load_transactions(self): - report_id = random.choice(self.reports) - schedules = random.choice(SCHEDULES) - params = { - "page": 1, - "ordering": "form_type", - "schedules": schedules, - "report_id": report_id, - } - self.client.get( - "/api/v1/transactions/", - name="load_transactions", - timeout=timeout, - params=params - ) - - -class Swarm(user.HttpUser): - tasks = [Tasks] - wait_time = between(1, 5) diff --git a/manifests/manifest-dev-api.yml b/manifests/manifest-dev-api.yml index 6ef17e66c2..05fb527f9f 100644 --- a/manifests/manifest-dev-api.yml +++ b/manifests/manifest-dev-api.yml @@ -8,7 +8,7 @@ applications: buildpacks: - python_buildpack command: bin/run-api.sh - memory: 1G + memory: 2G services: - fecfile-api-rds - fecfile-api-s3 diff --git a/manifests/manifest-prod-api.yml b/manifests/manifest-prod-api.yml index a3d3861c77..fd2a52b7ef 100644 --- a/manifests/manifest-prod-api.yml +++ b/manifests/manifest-prod-api.yml @@ -8,7 +8,7 @@ applications: buildpacks: - python_buildpack command: bin/run-api.sh - memory: 1G + memory: 2G services: - fecfile-api-rds - fecfile-api-s3 diff --git a/manifests/manifest-stage-api.yml b/manifests/manifest-stage-api.yml index 9a6d1addc6..88faf11a37 100644 --- a/manifests/manifest-stage-api.yml +++ b/manifests/manifest-stage-api.yml @@ -8,7 +8,7 @@ applications: buildpacks: - python_buildpack command: bin/run-api.sh - memory: 1G + memory: 2G services: - fecfile-api-rds - fecfile-api-s3