Skip to content

Commit

Permalink
1755 - As a data analyst, I need to be able to upload SSP data files (#…
Browse files Browse the repository at this point in the history
…2236)

* added ssp boolean to stt model

* changed stts and datafile models

* made ssp editable

* added radio buttons to frontend

* send ssp to backend through drf

* added frontend tests

* removed comments, fixed radio button

* change migrations

* remove temp files

* fixed test

* linting

* reorder ssp radios

* Fixed db migrations. Added backend test

* fixed serializer tests

* removed redundant ssp in the model

* changes for testing migration issue

* updated db migrations

* changed ssp and migration

* make new migration

* added ssp to populate

* change ssp in migration only

* fix file path

* make ssp upeditable

* update db migration order

* new migrations

* Changing populate_stts to be peek-ahead, always-update STT fields

* Changing populate_stts to be peek-ahead, always-update STT fields

* removing old request_access endpoint test

* plz fix

* add an irrelevant comment

* rm comment

* linting

* linting

* add frontend redux for fileType

* use global file type state in post/get requests

* support ssp in GET param

* linting

* fixed test

* fixed REDIS_URI

* add '-' to header

* rm prints

* cleaner if statement

* spelin

* rm stuffs

* update quarter-month mapping for logentries

* Adding our testfiles to gitignore and removing comments around filenames property. May update again soon

* move file type filter logic to get_queryset

* docstring

* added test for Tribe datafile and moved Tribe logic to serializer

* linting

* removing print and logs

Co-authored-by: mo sohani <[email protected]>
Co-authored-by: abottoms-coder <[email protected]>
Co-authored-by: Jan Timpe <[email protected]>
Co-authored-by: Andrew <[email protected]>
Co-authored-by: Pennington <[email protected]>
Co-authored-by: jtimpe <[email protected]>
  • Loading branch information
7 people authored Nov 8, 2022
1 parent cdc8d05 commit a0fd0b6
Show file tree
Hide file tree
Showing 25 changed files with 302 additions and 154 deletions.
3 changes: 3 additions & 0 deletions .gitignore
Original file line number Diff line number Diff line change
Expand Up @@ -22,6 +22,9 @@ tdrs-backend/coverage.xml
tdrs-backend/htmlcov/*
tdrs-backend/.env
tdrs-backend/.env.production
tdrs-backend/ADS*
tdrs-backend/temp_key_file
tdrs-backend/test
*.pyc
/backend/db.sqlite3
.DS_Store
Expand Down
2 changes: 1 addition & 1 deletion tdrs-backend/docker-compose.local.yml
Original file line number Diff line number Diff line change
Expand Up @@ -58,7 +58,7 @@ services:
- ACFTITAN_HOST
- ACFTITAN_KEY
- ACFTITAN_USERNAME
- REDIS_URI
- REDIS_URI=redis://redis-server:6379
- REDIS_SERVER_LOCAL=TRUE
- ACFTITAN_SFTP_PYTEST
volumes:
Expand Down
2 changes: 1 addition & 1 deletion tdrs-backend/docker-compose.yml
Original file line number Diff line number Diff line change
Expand Up @@ -70,7 +70,7 @@ services:
- ACFTITAN_HOST
- ACFTITAN_KEY
- ACFTITAN_USERNAME
- REDIS_URI
- REDIS_URI=redis://redis-server:6379
- REDIS_SERVER_LOCAL=TRUE
- ACFTITAN_SFTP_PYTEST
volumes:
Expand Down
2 changes: 2 additions & 0 deletions tdrs-backend/manifest.buildpack.yml
Original file line number Diff line number Diff line change
Expand Up @@ -4,6 +4,8 @@ applications:
memory: 512M
instances: 1
disk_quota: 2G
env:
REDIS_URI: redis://localhost:6379
buildpacks:
- https://github.com/cloudfoundry/apt-buildpack
- https://github.com/cloudfoundry/python-buildpack.git#v1.7.55
Expand Down
3 changes: 2 additions & 1 deletion tdrs-backend/tdpservice/conftest.py
Original file line number Diff line number Diff line change
Expand Up @@ -218,7 +218,8 @@ def base_data_file_data(fake_file_name, data_analyst):
"user": str(data_analyst.id),
"quarter": "Q1",
"year": 2020,
"stt": int(data_analyst.stt.id)
"stt": int(data_analyst.stt.id),
"ssp": False,
}


Expand Down
11 changes: 3 additions & 8 deletions tdrs-backend/tdpservice/data_files/models.py
Original file line number Diff line number Diff line change
Expand Up @@ -149,12 +149,7 @@ class Meta:
@property
def filename(self):
"""Return the correct filename for this data file."""
if str(self.stt.type).lower() == 'tribe':
return self.stt.filenames.get(
('Tribal ' if 'Tribal' not in self.section else '') + self.section,
None)
else:
return self.stt.filenames.get(self.section, None)
return self.stt.filenames.get(self.section, None)

@property
def fiscal_year(self):
Expand All @@ -165,11 +160,11 @@ def fiscal_year(self):
case DataFile.Quarter.Q1:
quarter_month_str = "(Oct - Dec)"
case DataFile.Quarter.Q2:
quarter_month_str = "(Jul - Sep)"
quarter_month_str = "(Jan - Mar)"
case DataFile.Quarter.Q3:
quarter_month_str = "(Apr - Jun)"
case DataFile.Quarter.Q4:
quarter_month_str = "(Jan - Mar)"
quarter_month_str = "(Jul - Sep)"

return f"{self.year} - {self.quarter} {quarter_month_str}"

Expand Down
10 changes: 8 additions & 2 deletions tdrs-backend/tdpservice/data_files/serializers.py
Original file line number Diff line number Diff line change
Expand Up @@ -19,6 +19,7 @@ class DataFileSerializer(serializers.ModelSerializer):
file = serializers.FileField(write_only=True)
stt = serializers.PrimaryKeyRelatedField(queryset=STT.objects.all())
user = serializers.PrimaryKeyRelatedField(queryset=User.objects.all())
ssp = serializers.BooleanField(write_only=True)

class Meta:
"""Metadata."""
Expand All @@ -35,13 +36,18 @@ class Meta:
"year",
"quarter",
"section",
"created_at"
"created_at",
"ssp",
]

def create(self, validated_data):
"""Create a new entry with a new version number."""
ssp = validated_data.pop('ssp')
if ssp:
validated_data['section'] = 'SSP ' + validated_data['section']
if validated_data.get('stt').type == 'tribe':
validated_data['section'] = 'Tribal ' + validated_data['section']
data_file = DataFile.create_new_version(validated_data)

# Determine the matching ClamAVFileScan for this DataFile.
av_scan = ClamAVFileScan.objects.filter(
file_name=data_file.original_filename,
Expand Down
30 changes: 30 additions & 0 deletions tdrs-backend/tdpservice/data_files/test/test_api.py
Original file line number Diff line number Diff line change
@@ -1,5 +1,6 @@
"""Tests for DataFiles Application."""
from unittest.mock import ANY, patch

from rest_framework import status
import pytest

Expand Down Expand Up @@ -206,6 +207,35 @@ def test_download_data_file_file_rejected_for_other_stt(

assert response.status_code == status.HTTP_403_FORBIDDEN

def test_data_files_data_upload_ssp(
    self, api_client, data_file_data,
):
    """Uploading a data file with ssp=True should prefix the section with 'SSP'."""
    data_file_data['ssp'] = True
    response = self.post_data_file_file(api_client, data_file_data)
    assert 'SSP Active Case Data' == response.data['section']
def test_data_file_data_upload_tribe(
    self, api_client, data_file_data, stt
):
    """Uploading for a tribal STT should prefix the section with 'Tribal'."""
    stt.type = 'tribe'
    stt.save()

    response = self.post_data_file_file(api_client, data_file_data)
    assert response.data['section'] == 'Tribal Active Case Data'

    # Reset the shared fixture so the STT reverts to its default type.
    stt.type = ''
    stt.save()

def test_data_files_data_upload_tanf(
    self, api_client, data_file_data,
):
    """Test that when Data Analysts upload file with ssp false the section name is not modified."""
    data_file_data['ssp'] = False

    response = self.post_data_file_file(api_client, data_file_data)
    assert response.data['section'] == 'Active Case Data'

def test_data_analyst_gets_email_when_user_uploads_report_for_their_stt(
self, api_client, data_file_data, user
):
Expand Down
5 changes: 1 addition & 4 deletions tdrs-backend/tdpservice/data_files/test/test_models.py
Original file line number Diff line number Diff line change
Expand Up @@ -81,7 +81,4 @@ def test_data_files_filename_is_expected(user):
"user": user,
"stt": stt
})
if stt.type == 'tribe':
assert new_data_file.filename == stt.filenames['Tribal ' if 'Tribal' not in section else '' + section]
else:
assert new_data_file.filename == stt.filenames[section]
assert new_data_file.filename == stt.filenames[section]
12 changes: 12 additions & 0 deletions tdrs-backend/tdpservice/data_files/views.py
Original file line number Diff line number Diff line change
Expand Up @@ -93,10 +93,22 @@ def create(self, request, *args, **kwargs):

return response

def get_queryset(self):
    """Apply custom queryset filters."""
    # Partition data files by program: requests with file_type=ssp-moe see
    # only SSP sections, everything else sees only non-SSP sections.
    queryset = super().get_queryset()
    wants_ssp = self.request.query_params.get('file_type') == 'ssp-moe'
    if wants_ssp:
        return queryset.filter(section__contains='SSP')
    return queryset.exclude(section__contains='SSP')

def filter_queryset(self, queryset):
    """Only apply filters to the list action."""
    is_listing = self.action == 'list'
    if not is_listing:
        # Non-list actions (retrieve, create, ...) bypass the filterset.
        self.filterset_class = None
    return super().filter_queryset(queryset)

def get_serializer_context(self):
Expand Down
83 changes: 33 additions & 50 deletions tdrs-backend/tdpservice/stts/management/commands/populate_stts.py
Original file line number Diff line number Diff line change
Expand Up @@ -21,54 +21,31 @@ def _populate_regions():
Region.objects.get_or_create(id=row["Id"])
Region.objects.get_or_create(id=1000)


def _get_states():
with open(DATA_DIR / "states.csv") as csvfile:
def _load_csv(filename, entity):
with open(DATA_DIR / filename) as csvfile:
reader = csv.DictReader(csvfile)
return [
STT(
code=row["Code"],
name=row["Name"],
region_id=row["Region"],
type=STT.EntityType.STATE,
filenames=json.loads(row["filenames"].replace('\'', '"')),
stt_code=row["STT_CODE"],
)
for row in reader
]


def _get_territories():
with open(DATA_DIR / "territories.csv") as csvfile:
reader = csv.DictReader(csvfile)
return [
STT(
code=row["Code"],
name=row["Name"],
region_id=row["Region"],
type=STT.EntityType.TERRITORY,
filenames=json.loads(row["filenames"].replace('\'', '"')),
stt_code=row["STT_CODE"],
)
for row in reader
]


def _populate_tribes():
with open(DATA_DIR / "tribes.csv") as csvfile:
reader = csv.DictReader(csvfile)
stts = [
STT(
name=row["Name"],
region_id=row["Region"],
state=STT.objects.get(code=row["Code"]),
type=STT.EntityType.TRIBE,
filenames=json.loads(row["filenames"].replace('\'', '"')),
stt_code=row["STT_CODE"],
)
for row in reader
]
STT.objects.bulk_create(stts, ignore_conflicts=True)
for row in reader:
stt, stt_created = STT.objects.get_or_create(name=row["Name"])
if stt_created: # These lines are spammy, should remove before merge
logger.debug("Created new entry for " + row["Name"])
else:
logger.debug("Found STT " + row["Name"] + ", will sync with data csv.")

stt.code = row["Code"]
stt.region_id = row["Region"]
if filename == "tribes.csv":
stt.state = STT.objects.get(code=row["Code"], type=STT.EntityType.STATE)

stt.type = entity
stt.filenames = json.loads(row["filenames"].replace('\'', '"'))
stt.stt_code = row["STT_CODE"]
stt.ssp = row["SSP"]
# TODO: Was seeing lots of references to STT.objects.filter(pk=...
# We could probably one-line this but we'd miss .save() signals
# https://stackoverflow.com/questions/41744096/
# TODO: we should finish the last columns from the csvs: Sample, SSN_Encrypted
stt.save()


class Command(BaseCommand):
Expand All @@ -79,8 +56,14 @@ class Command(BaseCommand):
def handle(self, *args, **options):
"""Populate the various regions, states, territories, and tribes."""
_populate_regions()
stts = _get_states()
stts.extend(_get_territories())
STT.objects.bulk_create(stts, ignore_conflicts=True)
_populate_tribes()

stt_map = [
("states.csv", STT.EntityType.STATE),
("territories.csv", STT.EntityType.TERRITORY),
("tribes.csv", STT.EntityType.TRIBE)
]

for datafile, entity in stt_map:
_load_csv(datafile, entity)

logger.info("STT import executed by Admin at %s", timezone.now())
Original file line number Diff line number Diff line change
@@ -1,49 +1,9 @@
# Generated by Django 3.2.13 on 2022-06-08 14:43

import csv
import json
from pathlib import Path

from django.core.management import call_command
from django.db import migrations, models


def add_filenames(apps, schema_editor):
call_command("populate_stts")
data_dir = Path(__file__).resolve().parent.parent /"management" / "commands" / "data"
STT = apps.get_model('stts','STT')

fieldnames = ["Code", "Name", "Region", "STT_CODE", "Sample", "SSP", "SSN_Encrypted", "filenames"]
with open(data_dir / "states.csv", "r") as csvfile:
reader = csv.DictReader(csvfile)
rows=[]
for row in reader:
filenames = row["filenames"] = row["filenames"].replace('\'', '"')
rows.append(row)
state = STT.objects.get(name=row["Name"])
state.filenames = json.loads(filenames)
state.save()

with open(data_dir / "tribes.csv", "r") as csvfile:
reader = csv.DictReader(csvfile)
rows=[]
for row in reader:
filenames = row["filenames"] = row["filenames"].replace('\'', '"')
rows.append(row)
tribe = STT.objects.get(name=row["Name"])
tribe.filenames = json.loads(filenames)
tribe.save()

with open(data_dir / "territories.csv", "r") as csvfile:
reader = csv.DictReader(csvfile)
rows=[]
for row in reader:
filenames = row["filenames"] = row["filenames"].replace('\'', '"')
rows.append(row)
territory = STT.objects.get(name=row["Name"])
territory.filenames = json.loads(filenames)
territory.save()

class Migration(migrations.Migration):

dependencies = [
Expand All @@ -56,6 +16,4 @@ class Migration(migrations.Migration):
name='filenames',
field=models.JSONField(blank=True, max_length=512, null=True),
),
migrations.RunPython(add_filenames)
]

18 changes: 18 additions & 0 deletions tdrs-backend/tdpservice/stts/migrations/0007_stt_ssp.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,18 @@
# Generated by Django 3.2.15 on 2022-10-28 20:00

from django.db import migrations, models


class Migration(migrations.Migration):
    # Adds the ``ssp`` flag to the STT model — presumably marking STTs that
    # submit SSP-MOE data (confirm against populate_stts). Nullable with a
    # False default so existing rows need no backfill.

    dependencies = [
        ('stts', '0006_alter_stt_filenames'),
    ]

    operations = [
        migrations.AddField(
            model_name='stt',
            name='ssp',
            field=models.BooleanField(default=False, null=True),
        ),
    ]
1 change: 1 addition & 0 deletions tdrs-backend/tdpservice/stts/models.py
Original file line number Diff line number Diff line change
Expand Up @@ -37,6 +37,7 @@ class EntityType(models.TextChoices):
stt_code = models.PositiveIntegerField(blank=True, null=True)
# Tribes have a state, which we need to store.
state = models.ForeignKey("self", on_delete=models.CASCADE, blank=True, null=True)
ssp = models.BooleanField(default=False, null=True)

class Meta:
"""Metadata."""
Expand Down
2 changes: 1 addition & 1 deletion tdrs-backend/tdpservice/stts/serializers.py
Original file line number Diff line number Diff line change
Expand Up @@ -14,7 +14,7 @@ class Meta:
"""Metadata."""

model = STT
fields = ["id", "type", "code", "name", "region"]
fields = ["id", "type", "code", "name", "region", "ssp"]

def get_code(self, obj):
"""Return the state code."""
Expand Down
24 changes: 0 additions & 24 deletions tdrs-backend/tdpservice/users/test/test_api/test_set_profile.py
Original file line number Diff line number Diff line change
Expand Up @@ -38,30 +38,6 @@ def test_set_profile_data(api_client, user):
assert user.last_name == "Bloggs"


@pytest.mark.django_db
def test_user_can_request_access(api_client, user, stt):
"""Test `access_request` endpoint updates the `account_approval_status` field to `Access Request`."""
api_client.login(username=user.username, password="test_password")

response = api_client.patch(
"/v1/users/request_access/",
{"first_name": "Joe", "last_name": "Bloggs", "stt": stt.id, "email": user.username},
format="json",
)
assert response.data == {
"id": str(user.id),
"email": user.username,
"first_name": "Joe",
"last_name": "Bloggs",
"access_request": False, # old value no longer touched
"account_approval_status": "Access request", # new value updated
"stt": {"id": stt.id, "type": stt.type, "code": stt.code, "name": stt.name, "region": stt.region.id},
"region": None,
"roles": [],
}

# TODO: In the future, we would like to test that users can be activated and their roles are correctly assigned.

@pytest.mark.django_db
def test_cannot_set_account_approval_status_through_api(api_client, user):
"""Test that the `account_approval_status` field cannot be updated through an api call to `set_profile`."""
Expand Down
Loading

0 comments on commit a0fd0b6

Please sign in to comment.