
Commit d981677

Updated test code with a few bug fixes thrown in
j-maynard committed Sep 10, 2024
1 parent fe4baa0 commit d981677
Showing 20 changed files with 935 additions and 616 deletions.
38 changes: 22 additions & 16 deletions src/controllers/csv-processor.ts
@@ -11,7 +11,7 @@ import { CSVHeader, ViewStream, ViewDTO, ViewErrDTO } from '../dtos/view-dto';
 import { Dataset } from '../entities/dataset';
 import { Revision } from '../entities/revision';
 import { Source } from '../entities/source';
-import { Import } from '../entities/import';
+import { FileImport } from '../entities/import_file';
 
 import { BlobStorageService } from './blob-storage';
 import { DataLakeService } from './datalake';
@@ -119,13 +119,13 @@ function validateParams(page_number: number, max_page_number: number, page_size:
     return errors;
 }
 
-export const uploadCSVToBlobStorage = async (fileStream: Readable, filetype: string): Promise<Import> => {
+export const uploadCSVToBlobStorage = async (fileStream: Readable, filetype: string): Promise<FileImport> => {
     const blobStorageService = new BlobStorageService();
     if (!fileStream) {
         logger.error('No buffer to upload to blob storage');
         throw new Error('No buffer to upload to blob storage');
     }
-    const importRecord = new Import();
+    const importRecord = new FileImport();
     importRecord.id = randomUUID();
     importRecord.mime_type = filetype;
     const extension = filetype === 'text/csv' ? 'csv' : 'zip';
@@ -141,7 +141,7 @@ export const uploadCSVToBlobStorage = async (fileStream: Readable, filetype: str
         await blobStorageService.uploadFile(`${importRecord.filename}`, fileStream);
         const resolvedHash = await promisedHash;
         if (resolvedHash) importRecord.hash = resolvedHash;
-        importRecord.uploaded_at = new Date(Date.now());
+        importRecord.uploadedAt = new Date(Date.now());
         importRecord.type = 'Draft';
         importRecord.location = 'BlobStorage';
         return importRecord;
@@ -151,9 +151,9 @@ export const uploadCSVToBlobStorage = async (fileStream: Readable, filetype: str
     }
 };
 
-export const uploadCSVBufferToBlobStorage = async (fileBuffer: Buffer, filetype: string): Promise<Import> => {
+export const uploadCSVBufferToBlobStorage = async (fileBuffer: Buffer, filetype: string): Promise<FileImport> => {
     const fileStream = Readable.from(fileBuffer);
-    const importRecord: Import = await uploadCSVToBlobStorage(fileStream, filetype);
+    const importRecord: FileImport = await uploadCSVToBlobStorage(fileStream, filetype);
     return importRecord;
 };
 
@@ -172,7 +172,7 @@ async function processCSVData(
     page: number,
     size: number,
     dataset: Dataset,
-    importObj: Import
+    importObj: FileImport
 ): Promise<ViewDTO | ViewErrDTO> {
     const dataArray: Array<Array<string>> = (await parse(buffer, {
         delimiter: ','
@@ -241,7 +241,10 @@ async function processCSVData(
     };
 }
 
-export const getFileFromDataLake = async (dataset: Dataset, importObj: Import): Promise<ViewStream | ViewErrDTO> => {
+export const getFileFromDataLake = async (
+    dataset: Dataset,
+    importObj: FileImport
+): Promise<ViewStream | ViewErrDTO> => {
     const datalakeService = new DataLakeService();
     let stream: Readable;
     try {
@@ -268,7 +271,7 @@ export const getFileFromDataLake = async (dataset: Dataset, importObj: Import):
 
 export const processCSVFromDatalake = async (
     dataset: Dataset,
-    importObj: Import,
+    importObj: FileImport,
     page: number,
     size: number
 ): Promise<ViewErrDTO | ViewDTO> => {
@@ -296,7 +299,10 @@ export const processCSVFromDatalake = async (
     return processCSVData(buff, page, size, dataset, importObj);
 };
 
-export const getFileFromBlobStorage = async (dataset: Dataset, importObj: Import): Promise<ViewStream | ViewErrDTO> => {
+export const getFileFromBlobStorage = async (
+    dataset: Dataset,
+    importObj: FileImport
+): Promise<ViewStream | ViewErrDTO> => {
     const blobStoageService = new BlobStorageService();
     let stream: Readable;
     try {
@@ -312,7 +318,7 @@ export const getFileFromBlobStorage = async (dataset: Dataset, importObj: Import
                    { lang: ENGLISH, message: t('errors.download_from_blobstorage', { lng: ENGLISH }) },
                    { lang: WELSH, message: t('errors.download_from_blobstorage', { lng: WELSH }) }
                ],
-                tag: { name: 'errors.download_from_datalake', params: {} }
+                tag: { name: 'errors.download_from_blobstorage', params: {} }
            }
        ],
        dataset_id: dataset.id
@@ -326,7 +332,7 @@ export const getFileFromBlobStorage = async (dataset: Dataset, importObj: Import
 
 export const processCSVFromBlobStorage = async (
     dataset: Dataset,
-    importObj: Import,
+    importObj: FileImport,
     page: number,
     size: number
 ): Promise<ViewErrDTO | ViewDTO> => {
@@ -345,7 +351,7 @@ export const processCSVFromBlobStorage = async (
                    { lang: ENGLISH, message: t('errors.download_from_blobstorage', { lng: ENGLISH }) },
                    { lang: WELSH, message: t('errors.download_from_blobstorage', { lng: WELSH }) }
                ],
-                tag: { name: 'errors.download_from_datalake', params: {} }
+                tag: { name: 'errors.download_from_blobstorage', params: {} }
            }
        ],
        dataset_id: dataset.id
@@ -354,7 +360,7 @@ export const processCSVFromBlobStorage = async (
     return processCSVData(buff, page, size, dataset, importObj);
 };
 
-export const moveFileToDataLake = async (importObj: Import) => {
+export const moveFileToDataLake = async (importObj: FileImport) => {
     const blobStorageService = new BlobStorageService();
     const datalakeService = new DataLakeService();
     try {
@@ -367,7 +373,7 @@ export const moveFileToDataLake = async (importObj: Import) => {
     }
 };
 
-export const createSources = async (importObj: Import): Promise<RevisionDTO> => {
+export const createSources = async (importObj: FileImport): Promise<RevisionDTO> => {
     const revision: Revision = await importObj.revision;
     const dataset: Dataset = await revision.dataset;
     let fileView: ViewDTO | ViewErrDTO;
@@ -395,7 +401,7 @@ export const createSources = async (importObj: Import): Promise<RevisionDTO> =>
         sources.push(source);
         source.save();
     });
-    const saveImport = await Import.findOneBy({ id: importObj.id });
+    const saveImport = await FileImport.findOneBy({ id: importObj.id });
     if (!saveImport) {
         throw new Error('Import not found');
     }
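Editor's note: a minimal usage sketch of the renamed upload helper above. The Express route, multer wiring, and path are assumptions for illustration only, not part of this commit.

```ts
// Sketch only — route path and middleware are hypothetical.
import express from 'express';
import multer from 'multer';

import { uploadCSVBufferToBlobStorage } from './controllers/csv-processor';

const app = express();
const upload = multer({ storage: multer.memoryStorage() });

app.post('/dataset/:id/import', upload.single('csv'), async (req, res) => {
    if (!req.file) {
        res.status(400).json({ message: 'No file provided' });
        return;
    }
    // Per the diff above, this returns an unsaved FileImport with id,
    // filename, hash, uploadedAt, type ('Draft') and location
    // ('BlobStorage') populated.
    const importRecord = await uploadCSVBufferToBlobStorage(req.file.buffer, req.file.mimetype);
    res.json({ import_id: importRecord.id });
});
```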
4 changes: 2 additions & 2 deletions src/database-manager.ts
@@ -5,7 +5,7 @@ import { Logger } from 'pino';
 import { Dataset } from './entities/dataset';
 import { DatasetInfo } from './entities/dataset_info';
 import { Revision } from './entities/revision';
-import { Import } from './entities/import';
+import { FileImport } from './entities/import_file';
 import { CsvInfo } from './entities/csv_info';
 import { Source } from './entities/source';
 import { Dimension } from './entities/dimension';
@@ -39,7 +39,7 @@ class DatabaseManager {
     async initializeDataSource() {
         this.dataSource = new DataSource({
             ...this.datasourceOptions,
-            entities: [Dataset, DatasetInfo, Revision, Import, CsvInfo, Source, Dimension, DimensionInfo, User]
+            entities: [Dataset, DatasetInfo, Revision, FileImport, CsvInfo, Source, Dimension, DimensionInfo, User]
         });
 
         await this.dataSource
20 changes: 10 additions & 10 deletions src/dtos/dataset-dto.ts
@@ -2,7 +2,7 @@ import { Dataset } from '../entities/dataset';
 import { Dimension } from '../entities/dimension';
 import { DimensionInfo } from '../entities/dimension_info';
 import { Source } from '../entities/source';
-import { Import } from '../entities/import';
+import { FileImport } from '../entities/import_file';
 import { Revision } from '../entities/revision';
 import { DatasetInfo } from '../entities/dataset_info';
 
@@ -88,20 +88,20 @@ export class ImportDTO {
     mime_type: string;
     filename: string;
     hash: string;
-    uploaded_at: string;
+    uploadedAt: string;
     type: string;
     location: string;
     sources?: SourceDTO[];
 
-    static async fromImport(importEntity: Import): Promise<ImportDTO> {
+    static async fromImport(importEntity: FileImport): Promise<ImportDTO> {
         const dto = new ImportDTO();
         dto.id = importEntity.id;
         const revision = await importEntity.revision;
         dto.revision_id = revision.id;
         dto.mime_type = importEntity.mime_type;
         dto.filename = importEntity.filename;
         dto.hash = importEntity.hash;
-        dto.uploaded_at = importEntity.uploaded_at?.toISOString() || '';
+        dto.uploadedAt = importEntity.uploadedAt?.toISOString() || '';
         dto.type = importEntity.type;
         dto.location = importEntity.location;
         dto.sources = await Promise.all(
@@ -146,14 +146,14 @@ export class RevisionDTO {
         revDto.approved_by = (await revision.approvedBy)?.name || undefined;
         revDto.created_by = (await revision.createdBy).name;
         revDto.imports = await Promise.all(
-            (await revision.imports).map(async (imp: Import) => {
+            (await revision.imports).map(async (imp: FileImport) => {
                 const impDto = new ImportDTO();
                 impDto.id = imp.id;
                 impDto.revision_id = (await imp.revision).id;
                 impDto.mime_type = imp.mime_type;
                 impDto.filename = imp.filename;
                 impDto.hash = imp.hash;
-                impDto.uploaded_at = imp.uploaded_at.toISOString();
+                impDto.uploadedAt = imp.uploadedAt.toISOString();
                 impDto.type = imp.type;
                 impDto.location = imp.location;
                 impDto.sources = await Promise.all(
@@ -262,14 +262,14 @@ export class DatasetDTO {
         revDto.approved_by = (await revision.approvedBy)?.name || undefined;
         revDto.created_by = (await revision.createdBy)?.name;
         revDto.imports = await Promise.all(
-            (await revision.imports).map(async (imp: Import) => {
+            (await revision.imports).map(async (imp: FileImport) => {
                 const impDto = new ImportDTO();
                 impDto.id = imp.id;
                 impDto.revision_id = (await imp.revision).id;
                 impDto.mime_type = imp.mime_type;
                 impDto.filename = imp.filename;
                 impDto.hash = imp.hash;
-                impDto.uploaded_at = imp.uploaded_at.toISOString();
+                impDto.uploadedAt = imp.uploadedAt.toISOString();
                 impDto.type = imp.type;
                 impDto.location = imp.location;
                 impDto.sources = await Promise.all(
@@ -356,13 +356,13 @@ export class DatasetDTO {
         revDto.approved_by = (await revision.approvedBy)?.name || undefined;
         revDto.created_by = (await revision.createdBy)?.name;
         revDto.imports = await Promise.all(
-            (await revision.imports).map((imp: Import) => {
+            (await revision.imports).map((imp: FileImport) => {
                 const impDto = new ImportDTO();
                 impDto.id = imp.id;
                 impDto.mime_type = imp.mime_type;
                 impDto.filename = imp.filename;
                 impDto.hash = imp.hash;
-                impDto.uploaded_at = imp.uploaded_at.toISOString();
+                impDto.uploadedAt = imp.uploadedAt.toISOString();
                 impDto.type = imp.type;
                 impDto.location = imp.location;
                 return impDto;
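Editor's note: the first two inline mapping blocks above duplicate what ImportDTO.fromImport already does; a possible consolidation is sketched below. This is a suggestion, not part of the commit, and it would not suit the third block, which deliberately omits revision_id and sources.

```ts
// Sketch only — assumes revision.imports resolves the same FileImport
// entities that ImportDTO.fromImport expects.
revDto.imports = await Promise.all(
    (await revision.imports).map((imp: FileImport) => ImportDTO.fromImport(imp))
);
```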
6 changes: 3 additions & 3 deletions src/entities/csv_info.ts
@@ -1,7 +1,7 @@
 import { Entity, PrimaryColumn, Column, BaseEntity, ManyToOne, JoinColumn } from 'typeorm';
 
 // eslint-disable-next-line import/no-cycle
-import { Import } from './import';
+import { FileImport } from './import_file';
 
 @Entity()
 export class CsvInfo extends BaseEntity {
@@ -17,10 +17,10 @@ export class CsvInfo extends BaseEntity {
     @Column({ type: 'varchar', length: 2 })
     linebreak: string;
 
-    @ManyToOne(() => Import, (importEntity) => importEntity.csvInfo, {
+    @ManyToOne(() => FileImport, (importEntity) => importEntity.csvInfo, {
         onDelete: 'CASCADE',
         orphanedRowAction: 'delete'
     })
     @JoinColumn({ name: 'import_id' })
-    import: Promise<Import>;
+    import: Promise<FileImport>;
 }
10 changes: 5 additions & 5 deletions src/entities/import.ts → src/entities/import_file.ts
@@ -16,8 +16,8 @@ import { CsvInfo } from './csv_info';
 // eslint-disable-next-line import/no-cycle
 import { Source } from './source';
 
-@Entity()
-export class Import extends BaseEntity {
+@Entity({ name: 'file_import', orderBy: { uploadedAt: 'ASC' } })
+export class FileImport extends BaseEntity {
     @PrimaryGeneratedColumn('uuid')
     id: string;
 
@@ -43,8 +43,8 @@ export class Import extends BaseEntity {
     @Column({ type: 'varchar', length: 255 })
     hash: string;
 
-    @CreateDateColumn()
-    uploaded_at: Date;
+    @CreateDateColumn({ name: 'uploaded_at' })
+    uploadedAt: Date;
 
     @Column({
         type: process.env.NODE_ENV === 'test' ? 'text' : 'enum',
@@ -55,7 +55,7 @@ export class Import extends BaseEntity {
 
     @Column({
         type: process.env.NODE_ENV === 'test' ? 'text' : 'enum',
-        enum: ['BlobStorage', 'Datalake'],
+        enum: ['BlobStorage', 'Datalake', 'Unknown'],
         nullable: false
     })
     location: string;
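Editor's note: `@Entity({ name: 'file_import' })` implies a table rename, and no migration appears in this commit. A sketch of what one might look like follows; the class name and timestamp are invented, and on Postgres the new 'Unknown' enum value would additionally require an ALTER TYPE on the generated enum type.

```ts
import { MigrationInterface, QueryRunner } from 'typeorm';

// Hypothetical migration — not part of this commit.
export class RenameImportToFileImport1725926400000 implements MigrationInterface {
    public async up(queryRunner: QueryRunner): Promise<void> {
        // Align the table name with @Entity({ name: 'file_import' }).
        await queryRunner.query(`ALTER TABLE "import" RENAME TO "file_import"`);
    }

    public async down(queryRunner: QueryRunner): Promise<void> {
        await queryRunner.query(`ALTER TABLE "file_import" RENAME TO "import"`);
    }
}
```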
10 changes: 5 additions & 5 deletions src/entities/revision.ts
@@ -15,7 +15,7 @@ import { User } from './user';
 // eslint-disable-next-line import/no-cycle
 import { Source } from './source';
 // eslint-disable-next-line import/no-cycle
-import { Import } from './import';
+import { FileImport } from './import_file';
 
 interface RevisionInterface {
     id: string;
@@ -28,10 +28,10 @@ interface RevisionInterface {
     approvalDate: Date;
     approvedBy: Promise<User>;
     createdBy: Promise<User>;
-    imports: Promise<Import[]>;
+    imports: Promise<FileImport[]>;
 }
 
-@Entity()
+@Entity({ name: 'revision', orderBy: { creationDate: 'ASC' } })
 export class Revision extends BaseEntity implements RevisionInterface {
     @PrimaryGeneratedColumn('uuid')
     id: string;
@@ -79,10 +79,10 @@ export class Revision extends BaseEntity implements RevisionInterface {
     })
     sources: Promise<Source[]>;
 
-    @OneToMany(() => Import, (importEntity) => importEntity.revision, {
+    @OneToMany(() => FileImport, (importEntity) => importEntity.revision, {
         cascade: true
     })
-    imports: Promise<Import[]>;
+    imports: Promise<FileImport[]>;
 
     @ManyToOne(() => User, { nullable: true })
     @JoinColumn({ name: 'approved_by' })
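Editor's note: the new `orderBy` options on Revision and FileImport set a default ordering that TypeORM applies to find operations, so callers get deterministic ordering without specifying it. A minimal sketch (the `dataSource` handle is assumed):

```ts
// With orderBy: { creationDate: 'ASC' } on the entity, a plain find()
// returns revisions oldest-first by default.
const revisions = await dataSource.getRepository(Revision).find();
```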
6 changes: 3 additions & 3 deletions src/entities/source.ts
@@ -3,7 +3,7 @@ import { Entity, PrimaryGeneratedColumn, Column, BaseEntity, ManyToOne, JoinColu
 // eslint-disable-next-line import/no-cycle
 import { Dimension } from './dimension';
 // eslint-disable-next-line import/no-cycle
-import { Import } from './import';
+import { FileImport } from './import_file';
 // eslint-disable-next-line import/no-cycle
 import { Revision } from './revision';
 import { SourceType } from './source_type';
@@ -20,13 +20,13 @@ export class Source extends BaseEntity {
     @JoinColumn({ name: 'dimension_id' })
     dimension: Promise<Dimension>;
 
-    @ManyToOne(() => Import, (importEntity) => importEntity.sources, {
+    @ManyToOne(() => FileImport, (importEntity) => importEntity.sources, {
         nullable: false,
         onDelete: 'CASCADE',
         orphanedRowAction: 'delete'
     })
     @JoinColumn({ name: 'import_id' })
-    import: Promise<Import>;
+    import: Promise<FileImport>;
 
     @ManyToOne(() => Revision, {
         onDelete: 'CASCADE',
3 changes: 2 additions & 1 deletion src/resources/locales/cy-GB.json
@@ -43,7 +43,8 @@
         "page_number_to_high": "Rhaid i rif y dudalen fod yn llai na neu'n hafal i {{page_number}} (GPT)",
         "page_number_to_low": "Rhaid i rif y dudalen fod yn fwy na 0 (GPT)",
         "no_datafile": "Dim ffeil data ynghlwm wrth Set Ddata (GPT)",
-        "download_from_datalake": "Gwall wrth lawrlwytho ffeil o'r llyn data (GPT)"
+        "download_from_datalake": "Gwall wrth lawrlwytho ffeil o'r llyn data (GPT)",
+        "download_from_blobstorage": "Gwall wrth lawrlwytho ffeil o’r storfa blob (GPT)"
     },
     "dimensionInfo": {
         "footnotes": {
1 change: 1 addition & 0 deletions src/resources/locales/en-GB.json
@@ -38,6 +38,7 @@
         "page_number_to_low": "Page number must be greater than 0",
         "no_datafile": "No datafile attached to Dataset",
         "download_from_datalake": "Error downloading file from datalake",
+        "download_from_blobstorage": "Error downloading file from blob storage",
         "upload": {
             "no_title": "No title for the dataset has been provided",
             "no-internal-name": "No internal name for the dataset has been provided",
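Editor's note: the new download_from_blobstorage key pairs with the corrected error tags in csv-processor.ts above, where it is looked up via `t('errors.download_from_blobstorage', { lng: ... })`. A minimal sketch of how such a key resolves per language; the init shape below is an assumption, as the project's actual i18next setup is not shown in this commit.

```ts
import i18next from 'i18next';

// Sketch only — resource registration in the real project may differ.
await i18next.init({
    lng: 'en-GB',
    fallbackLng: 'en-GB',
    resources: {
        'en-GB': { translation: { errors: { download_from_blobstorage: 'Error downloading file from blob storage' } } },
        'cy-GB': { translation: { errors: { download_from_blobstorage: "Gwall wrth lawrlwytho ffeil o'r storfa blob (GPT)" } } }
    }
});

i18next.t('errors.download_from_blobstorage'); // English message
i18next.t('errors.download_from_blobstorage', { lng: 'cy-GB' }); // Welsh message
```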
(11 more changed files not shown.)
