diff --git a/src/enums/source-type.ts b/src/enums/source-type.ts
index fa20c88..961aced 100644
--- a/src/enums/source-type.ts
+++ b/src/enums/source-type.ts
@@ -1,9 +1,9 @@
 /* eslint-disable no-shadow */
 /* eslint-disable no-unused-vars */
 export enum SourceType {
-    DATAVALUES = 'DataValues',
-    FOOTNOTES = 'Footnotes',
-    DIMENSION = 'Dimension',
-    IGNORE = 'IGNORE',
-    UNKNOWN = 'UNKNOWN'
+    UNKNOWN = 'UNKNOWN',
+    DATAVALUES = 'DATAVALUES',
+    DIMENSION = 'DIMENSION',
+    FOOTNOTES = 'FOOTNOTES',
+    IGNORE = 'IGNORE'
 }
diff --git a/src/middleware/session.ts b/src/middleware/session.ts
index 7aa07e5..d77e2fb 100644
--- a/src/middleware/session.ts
+++ b/src/middleware/session.ts
@@ -25,7 +25,7 @@ async function createRedisConnection() {
 
 createRedisConnection()
     .then((result) => logger.info(result))
-    .catch((ex) => logger.error(ex));
+    .catch((ex) => logger.error(`REDIS: ${ex}`));
 
 if (process.env.REDIS_URL) {
     store = new RedisStore({
diff --git a/src/middleware/translation.ts b/src/middleware/translation.ts
index 2c98454..01cdd23 100644
--- a/src/middleware/translation.ts
+++ b/src/middleware/translation.ts
@@ -6,8 +6,8 @@ const ENGLISH = 'en-GB';
 const WELSH = 'cy-GB';
 
 i18next
-    .use(Backend)
     .use(i18nextMiddleware.LanguageDetector)
+    .use(Backend)
     .init({
         detection: {
             order: ['path', 'header'],
@@ -16,10 +16,10 @@ i18next
             ignoreRoutes: ['/healthcheck', '/public', '/css', '/assets']
         },
         backend: {
-            loadPath: `${__dirname}/../resources/locales/{{lng}}.json`
+            loadPath: `${__dirname}/translations/{{lng}}.json`
        },
-        fallbackLng: ENGLISH,
-        preload: [ENGLISH, WELSH],
+        fallbackLng: 'en',
+        preload: ['en', 'cy'],
         debug: false
     });
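With this configuration the language is detected from the URL path first and the Accept-Language header second, and the filesystem backend loads src/middleware/translations/en.json or cy.json. A minimal sketch (not part of this change set) of how a downstream route consumes the detected language, assuming i18next-http-middleware is mounted on the app and provides the usual req.i18n/req.t augmentation; the route and key names are illustrative:

    import express from 'express';
    import i18next from 'i18next';
    import i18nextMiddleware from 'i18next-http-middleware';

    const app = express();
    app.use(i18nextMiddleware.handle(i18next));

    app.get('/:lang/example', (req, res) => {
        // 'path' comes first in detection.order, so /en/... and /cy/... win over the header
        const lang = req.i18n.language;
        // req.t() reads from the preloaded bundle for that language, falling back to 'en'
        res.send(`${lang}: ${req.t('greeting')}`);
    });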
diff --git a/src/resources/locales/cy-GB.json b/src/middleware/translations/cy.json
similarity index 100%
rename from src/resources/locales/cy-GB.json
rename to src/middleware/translations/cy.json
diff --git a/src/resources/locales/en-GB.json b/src/middleware/translations/en.json
similarity index 53%
rename from src/resources/locales/en-GB.json
rename to src/middleware/translations/en.json
index 9364921..db7c9d9 100644
--- a/src/resources/locales/en-GB.json
+++ b/src/middleware/translations/en.json
@@ -1,21 +1,19 @@
 {
     "greeting": "Hello",
-    "beta": "Beta",
-    "menu": "Menu",
-    "publish-dataset": "Publish a Dataset",
-    "upload-csv": "Upload CSV",
-    "list-data-files": "List Datasets",
-    "feedback": "This is a new service – your feedback will help us to improve it.",
     "share-page": "Share this page",
     "top-of-page": "Back to top",
-    "status-field": "statws",
     "app-running": "App is running",
     "health-notes": "Expand endpoint to check for database connection and other services.",
-    "datalake-error": "Unable to connect to get Datalake",
+    "developer_warning": "Warning This is a developer page and has not been through user research",
+    "developer_tag": "Developer Page",
+    "status": {
+        "warning": "Warning"
+    },
     "buttons": {
         "continue": "Continue",
         "back": "Back",
         "cancel": "Cancel",
+        "upload_csv": "Upload CSV File",
         "choose_different": "Choose a different data table"
     },
     "pagination": {
@@ -30,15 +28,39 @@
             "google": "Login with Google"
         },
         "error": {
-            "summary-title": "There is a problem",
+            "summary_title": "There is a problem",
             "generic": "You could not be logged in. Please try again later.",
             "expired": "Your session has expired. Please log in again."
         }
     },
+    "header": {
+        "beta": "Beta",
+        "feedback": "This is a new service – your feedback will help us to improve it.",
+        "navigation": {
+            "menu": "Menu",
+            "skip_to_content": "Skip to main content",
+            "publish-dataset": "Publish a Dataset",
+            "list-datasets": "List Datasets",
+            "logout": "Logout"
+        }
+    },
+    "footer": {
+        "support_link": "Support Links",
+        "project_reports": "Project Reports",
+        "github_repo": "GitHub Repository",
+        "help": "Help",
+        "cookies": "Cookies",
+        "contact": "Contact",
+        "terms": "Terms and conditions",
+        "copyright_notice": "All content is available under the Open Government Licence v3.0, except where otherwise stated",
+        "crown_copyright": "© Crown copyright",
+        "welsh_gov": "Welsh Government"
+    },
     "homepage": {
-        "title": "Welcome to StatsWales Beta",
-        "welcome": "We're going to use the GovUK Design system until we're able to get hold of the Welsh Government GEL",
-        "apitext": "You can find our api at "
+        "title": "Welcome to the StatsWales Beta",
+        "welcome": "This is a test page for development. From here you can do the following:",
+        "uploaded_already": "See what's been uploaded already",
+        "dataset_creation": "Start a new dataset creation journey"
     },
     "publish": {
         "start": {
@@ -56,7 +78,7 @@
         },
         "upload": {
             "title": "Upload the data table",
-            "note": "The file must be in CSV format"
+            "note": "The file should be in CSV format"
         },
         "preview": {
             "heading": "Check the data table",
@@ -69,16 +91,31 @@
             "showing": "Showing rows"
         },
         "sources": {
-            "heading": "What does each column in the data table contain?"
+            "heading": "What does each column in the data table contain?",
+            "types": {
+                "DATAVALUES": "Data values",
+                "FOOTNOTES": "Notes code",
+                "DIMENSION": "Dimension",
+                "IGNORE": "This column can be ignored",
+                "UNKNOWN": "Select"
+            }
         }
     },
-    "upload": {
-        "title": "Upload a CSV file",
-        "description": "Upload a CSV file to the data lake",
-        "datasetName": "Dataset Name",
-        "datasetDescription": "Dataset Description",
-        "selectCSV": "Select a CSV file",
-        "uploadBtn": "Upload CSV"
+    "view": {
+        "list": {
+            "heading": "List Datasets"
+        },
+        "display": {
+            "heading": "Display a Dataset",
+            "title": "Title:",
+            "description": "Description:",
+            "notes": "Notes",
+            "contents": "Contents",
+            "summary": "Summary",
+            "dimension": "Dimension",
+            "revision": "Revision",
+            "import": "File Import"
+        }
     },
     "errors": {
         "session": {
@@ -87,50 +124,38 @@
             "current_import_missing": "Current import is missing from the session",
             "no_sources_on_import": "Current import has no sources present on it, was the CSV empty?"
         },
-        "source": {
+        "sources": {
             "unknowns_found": "You need to tell us what each column contains",
             "multiple_datavalues": "You can only specify one column as containing data values",
             "multiple_footnotes": "You can only specify one column as containing notes",
             "dimension_creation_failed": "Something went wrong trying to create the dimensions. Please try again."
         },
-        "missing": "Filename Missing",
+        "view": {
+            "list": {
+                "no_datasets": "No datasets are currently listed in the database. If you need help, please reach out to a member of the development team."
+            },
+            "display": {
+                "no_dimensions": "This dataset has not been completed with dimensions",
+                "no_revisions": "This dataset has not been completed with revisions"
+            }
+        },
+        "datalake_error": "Unable to connect to Datalake",
+        "blob_storage_errror": "Unable to connect to Blob Storage Service",
         "problem": "There is a problem",
         "name_missing": "Dataset Name Missing",
         "dataset_missing": "No dataset found with this ID",
         "title": {
-            "missing": "Title Missing",
+            "missing": "Either a title was not entered or there is a problem.",
             "duplicate": "Title already exists"
         },
         "upload": {
-            "no-csv": "No CSV data provided",
-            "no-csv-data": "No CSV data available"
+            "no_csv": "No CSV data provided",
+            "no_csv_data": "No CSV data available"
         }
     },
-    "list": {
-        "title": "Display Data"
-    },
-    "display": {
-        "title": "Displaying Data for",
-        "description": "Description:"
-    },
     "api": {
         "available": "API is available"
     },
-    "header": {
-
-    },
-    "footer": {
-        "support-link": "Support Links",
-        "project-reports": "Project Reports",
-        "github-repo": "GitHub Repository",
-        "help": "Help",
-        "cookies": "Cookies",
-        "contact": "Contact",
-        "terms": "Terms and conditions",
-        "copyright-notice": "All content is available under the Open Government Licence v3.0, except where otherwise stated",
-        "crown-copyright": "© Crown copyright",
-        "welsh-gov": "Welsh Government"
-    },
     "routes": {
         "healthcheck": "healthcheck",
         "upload": "upload",
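The keys under publish.sources.types mirror the uppercased SourceType values, so a select list for the sources screen can be generated directly from the enum. A hypothetical helper (not part of this change) illustrating that pairing; the import path and the option shape are assumptions:

    import { TFunction } from 'i18next';
    import { SourceType } from '../enums/source-type';

    interface SourceTypeOption {
        value: string;
        label: string;
    }

    function sourceTypeOptions(t: TFunction): SourceTypeOption[] {
        // Object.keys(SourceType) yields ['UNKNOWN', 'DATAVALUES', 'DIMENSION', 'FOOTNOTES', 'IGNORE'],
        // which now line up with the translation keys,
        // e.g. t('publish.sources.types.DATAVALUES') === 'Data values'
        return Object.keys(SourceType).map((key) => ({
            value: key,
            label: t(`publish.sources.types.${key}`)
        }));
    }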
diff --git a/src/routes/publish.ts b/src/routes/publish.ts
index cd309c9..f6a62be 100644
--- a/src/routes/publish.ts
+++ b/src/routes/publish.ts
@@ -81,45 +81,49 @@ function generateViewErrors(datasetID: string | undefined, statusCode: number, e
 }
 
 function checkCurrentDataset(req: AuthedRequest, res: Response): DatasetDTO | undefined {
+    const lang = req.i18n.language;
     const currentDataset = req.session.currentDataset;
     if (!currentDataset) {
         logger.error('No current dataset found in the session... user may have navigated here by mistake');
         req.session.errors = generateViewErrors(undefined, 500, [
             generateError('session', 'errors.session.current_dataset_missing', {})
         ]);
-        res.redirect(`/${req.i18n.language}/${req.t('routes.publish.start')}/`);
+        res.redirect(`/${lang}/${req.i18n.t('routes.publish.start', { lng: lang })}/`);
         return undefined;
     }
     return currentDataset;
 }
 
 function checkCurrentRevision(req: AuthedRequest, res: Response): RevisionDTO | undefined {
+    const lang = req.i18n.language;
     const currentRevision = req.session.currentRevision;
     if (!currentRevision) {
         logger.error('No current revision found in the session... user may have navigated here by mistake');
         req.session.errors = generateViewErrors(undefined, 500, [
             generateError('session', 'errors.session.current_revision_missing', {})
         ]);
-        res.redirect(`/${req.i18n.language}/${req.t('routes.publish.start')}/`);
+        res.redirect(`/${lang}/${req.i18n.t('routes.publish.start', { lng: lang })}/`);
         return undefined;
     }
     return currentRevision;
 }
 
 function checkCurrentFileImport(req: AuthedRequest, res: Response): ImportDTO | undefined {
+    const lang = req.i18n.language;
     const currentFileImport = req.session.currentImport;
     if (!currentFileImport) {
         logger.error('No current import found in the session... user may have navigated here by mistake');
         req.session.errors = generateViewErrors(undefined, 500, [
             generateError('session', 'errors.session.current_import_missing', {})
         ]);
-        res.redirect(`/${req.i18n.language}/${req.t('routes.publish.start')}/`);
+        res.redirect(`/${lang}/${req.i18n.t('routes.publish.start', { lng: lang })}/`);
         return undefined;
     }
     return currentFileImport;
 }
 
 async function handleProcessedCSV(processedCSV: UploadDTO | UploadErrDTO, req: AuthedRequest, res: Response) {
+    const lang = req.i18n.language;
     if (processedCSV.success) {
         const viewDTO = processedCSV as ViewDTO;
         if (!viewDTO.dataset) {
@@ -128,9 +132,13 @@ async function handleProcessedCSV(processedCSV: UploadDTO | UploadErrDTO, req: A
             return;
         }
         setCurrentToSession(viewDTO.dataset, req);
-        res.redirect(`/${req.i18n.language}/publish/preview`);
+        res.redirect(
+            `/${lang}/${req.i18n.t('routes.publish.start', { lng: lang })}/${req.i18n.t('routes.publish.preview', { lng: lang })}`
+        );
     } else {
-        await createNewDataset(req, res);
+        const err = processedCSV as UploadErrDTO;
+        req.session.errors = generateViewErrors(err.dataset?.id, 500, err.errors);
+        res.redirect(`/${lang}/${req.i18n.t('routes.publish.start', { lng: lang })}/`);
     }
 }
 
@@ -141,7 +149,9 @@ async function createNewDataset(req: AuthedRequest, res: Response) {
     if (!title) {
         logger.debug('Current title was missing from the session. Something might have gone wrong');
         req.session.errors = generateViewErrors(undefined, 400, [generateError('title', 'errors.title_missing', {})]);
-        res.redirect(`/${req.i18n.language}/${req.t('routes.publish.start')}/${t('routes.publish.title')}`);
+        res.redirect(
+            `/${lang}/${req.i18n.t('routes.publish.start', { lng: lang })}/${t('routes.publish.title', { lng: lang })}`
+        );
         return;
     }
     const file = req.file;
@@ -215,26 +225,36 @@ publish.post('/title', upload.none(), (req: AuthedRequest, res: Response) => {
         return;
     }
     req.session.currentTitle = req.body.title;
-    res.redirect(`/${req.i18n.language}/${req.t('routes.publish.start')}/${req.t('routes.publish.upload')}`);
+    req.session.save();
+    const lang = req.i18n.language;
+    res.redirect(
+        `/${lang}/${req.i18n.t('routes.publish.start', { lng: lang })}/${req.i18n.t('routes.publish.upload', { lng: lang })}`
+    );
 });
 
 publish.get('/upload', (req: AuthedRequest, res: Response) => {
     const currentTitle = req.session.currentTitle;
     const currentDataset = req.session.currentDataset;
-    if (!currentDataset || !currentTitle) {
+    if (!currentDataset && !currentTitle) {
         logger.error('There is no title or currentDataset in the session. Abandoning this create journey');
         req.session.errors = generateViewErrors(undefined, 500, [generateError('title', 'errors.title.missing', {})]);
-        res.redirect(`/${req.i18n.language}/${req.t('routes.publish.start')}/${t('routes.publish.title')}`);
+        req.session.save();
+        const lang = req.i18n.language;
+        res.redirect(
+            `/${lang}/${req.i18n.t('routes.publish.start', { lng: lang })}/${t('routes.publish.title', { lng: lang })}`
+        );
         return;
     }
-    const title = currentDataset.datasetInfo?.find((info) => info.language === req.i18n.language) || currentTitle;
+    const title = currentDataset?.datasetInfo?.find((info) => info.language === req.i18n.language) || currentTitle;
     res.render('publish/upload', { title });
 });
 
 publish.post('/upload', upload.single('csv'), async (req: AuthedRequest, res: Response) => {
     if (req.session.currentDataset) {
+        logger.info('Dataset present... Amending existing Dataset');
         await uploadNewFileToExistingDataset(req, res);
     } else {
+        logger.info('Creating a new dataset');
         await createNewDataset(req, res);
     }
 });
 
@@ -272,7 +292,8 @@ publish.get('/preview', async (req: AuthedRequest, res: Response) => {
         req.session.errors = generateViewErrors(undefined, 500, [
             generateError('preview', 'errors.preview.failed_to_get_preview', {})
         ]);
-        res.redirect(`/${req.i18n.language}/${req.t('routes.publish.start')}/`);
+        req.session.save();
+        res.redirect(`/${lang}/${req.i18n.t('routes.publish.start', { lng: lang })}/`);
         return;
     }
     const data = previewData as ViewDTO;
@@ -287,24 +308,31 @@ async function confirmFileUpload(
     req: AuthedRequest,
     res: Response
 ) {
+    const lang = req.i18n.language;
     try {
         const confirmedImport: ConfirmedImportDTO = await statsWalesApi.confirmFileImport(
             currentDataset.id,
             currentRevision.id,
             currentFileImport.id
         );
-        console.log(JSON.stringify(confirmedImport, null, 2));
         if (confirmedImport.success) {
-            const fileImport = confirmedImport.fileImport;
-            req.session.currentImport = fileImport;
+            req.session.currentImport = confirmedImport.fileImport;
             req.session.save();
-            res.status(200);
-            res.render('publish/sources', { currentImport: fileImport });
+            res.redirect(
+                `/${lang}/${req.i18n.t('routes.publish.start', { lng: lang })}/${req.i18n.t('routes.publish.sources', { lng: lang })}`
+            );
         }
     } catch (err) {
-        logger.error(err);
-        res.status(500);
-        res.render('publish/confirm', { error: err });
+        logger.error(
+            `An HTTP error occurred trying to confirm import from the dataset with the following error: ${err}`
+        );
+        req.session.errors = generateViewErrors(currentDataset.id, 500, [
+            generateError('confirm', 'errors.preview.confirm_error', {})
+        ]);
+        req.session.save();
+        res.redirect(
+            `/${lang}/${req.i18n.t('routes.publish.start', { lng: lang })}/${req.i18n.t('routes.publish.preview', { lng: lang })}`
+        );
     }
 }
 
@@ -316,18 +344,29 @@ async function rejectFileReturnToUpload(
     req: AuthedRequest,
     res: Response
 ) {
+    const lang = req.i18n.language;
     try {
-        await statsWalesApi.removeFileImport(currentDataset.id, currentRevision.id, currentFileImport.id);
         req.session.currentImport = undefined;
-        res.redirect(`/${req.i18n.language}/publish/title`);
+        await statsWalesApi.removeFileImport(currentDataset.id, currentRevision.id, currentFileImport.id);
+        req.session.save();
+        res.redirect(
+            `/${lang}/${req.i18n.t('routes.publish.start', { lng: lang })}/${req.i18n.t('routes.publish.upload')}`
+        );
     } catch (err) {
-        logger.error(err);
-        res.status(500);
-        res.render('publish/confirm', { error: err });
+        logger.error(
+            `An HTTP error occurred trying to remove the import from the dataset with the following error: ${err}`
+        );
+        req.session.errors = generateViewErrors(currentDataset.id, 500, [
+            generateError('confirm', 'errors.preview.remove_error', {})
+        ]);
+        req.session.save();
+        res.redirect(
+            `/${lang}/${req.i18n.t('routes.publish.start', { lng: lang })}/${req.i18n.t('routes.publish.preview', { lng: lang })}`
+        );
     }
 }
 
-publish.post('/sources', upload.none(), async (req: AuthedRequest, res: Response) => {
+publish.post('/preview', upload.none(), async (req: AuthedRequest, res: Response) => {
     const currentDataset = checkCurrentDataset(req, res);
     if (!currentDataset) {
         return;
@@ -350,23 +389,50 @@ publish.post('/sources', upload.none(), async (req: AuthedRequest, res: Response
         req.session.errors = generateViewErrors(undefined, 400, [
             generateError('confirmBtn', 'errors.confirm.missing', {})
         ]);
-        res.redirect(`/${req.i18n.language}/publish/preview`);
+        req.session.save();
+        res.redirect(
+            `/${lang}/${req.i18n.t('routes.publish.start', { lng: lang })}/${req.i18n.t('routes.publish.preview', { lng: lang })}`
+        );
     }
     const statsWalesApi = new StatsWalesApi(lang, req.jwt);
     if (confirmData === 'true') {
+        logger.info('User confirmed file upload was correct');
         await confirmFileUpload(currentDataset, currentRevision, currentFileImport, statsWalesApi, req, res);
     } else {
+        logger.info('User rejected the file in preview');
         await rejectFileReturnToUpload(currentDataset, currentRevision, currentFileImport, statsWalesApi, req, res);
     }
 });
 
+function updateCurrentImport(currentImport: ImportDTO, dimensionCreationRequest: DimensionCreationDTO[]) {
+    if (currentImport.sources) {
+        currentImport.sources.forEach((source) => {
+            source.type =
+                dimensionCreationRequest.find((dim) => dim.sourceId === source.id)?.sourceType || SourceType.UNKNOWN;
+        });
+    }
+    return currentImport;
+}
+
 publish.get('/sources', upload.none(), (req: AuthedRequest, res: Response) => {
-    const currentFileImport = checkCurrentFileImport(req, res);
+    const lang = req.i18n.language;
+    let currentFileImport = checkCurrentFileImport(req, res);
     const dimensionCreationRequest = req.session.dimensionCreationRequest;
     if (!currentFileImport) {
         return;
     }
+    if (!currentFileImport.sources) {
+        logger.error('No current import found in the session with sources... user may have navigated here by mistake');
+        req.session.errors = generateViewErrors(undefined, 500, [
+            generateError('session', 'errors.session.no_sources_on_import', {})
+        ]);
+        req.session.save();
+        res.redirect(`/${lang}/${req.i18n.t('routes.publish.start', { lng: lang })}`);
+        return;
+    }
     const errs = req.session.errors;
+    req.session.errors = undefined;
+    req.session.save();
     if (errs) {
         res.status(500);
     } else {
@@ -374,19 +440,21 @@ publish.get('/sources', upload.none(), (req: AuthedRequest, res: Response) => {
     }
 
     if (dimensionCreationRequest) {
-        if (currentFileImport.sources) {
-            currentFileImport.sources.forEach((source) => {
-                source.type =
-                    dimensionCreationRequest.find((dim) => dim.sourceId === source.id)?.sourceType ||
-                    SourceType.UNKNOWN;
-            });
-        }
-    }
-
-    res.render('publish/sources', { errors: errs, currentImport: currentFileImport });
+        currentFileImport = updateCurrentImport(currentFileImport, dimensionCreationRequest);
+    } else {
+        currentFileImport.sources.forEach((source) => {
+            source.type = SourceType.UNKNOWN;
+        });
+    }
+    res.render('publish/sources', {
+        errors: errs,
+        currentImport: currentFileImport,
+        sourceTypes: Object.keys(SourceType)
+    });
 });
 
-publish.post('/source-confirmation', upload.none(), async (req: AuthedRequest, res: Response) => {
+publish.post('/sources', upload.none(), async (req: AuthedRequest, res: Response) => {
+    const lang = req.i18n.language;
     const currentDataset = checkCurrentDataset(req, res);
     if (!currentDataset) {
         return;
@@ -407,10 +475,11 @@ publish.post('/source-confirmation', upload.none(), async (req: AuthedRequest, r
         req.session.errors = generateViewErrors(undefined, 500, [
             generateError('session', 'errors.session.no_sources_on_import', {})
         ]);
-        res.redirect(`/${req.i18n.language}/${req.t('routes.publish.start')}/`);
+        req.session.save();
+        res.redirect(`/${lang}/${req.i18n.t('routes.publish.start', { lng: lang })}`);
         return;
     }
-
+    logger.info('Creating Dimension Request object');
     const dimensionCreationRequest: DimensionCreationDTO[] = currentFileImport.sources.map((source) => {
         return {
             sourceId: source.id,
@@ -418,35 +487,68 @@ publish.post('/source-confirmation', upload.none(), async (req: AuthedRequest, r
         };
     });
     req.session.dimensionCreationRequest = dimensionCreationRequest;
-    if (dimensionCreationRequest.find((createRequest) => createRequest.sourceType === SourceType.UNKNOWN)) {
+    req.session.save();
+    const updatedFileImportWithSourceType = updateCurrentImport(currentFileImport, dimensionCreationRequest);
+    console.log(`dimensionCreationRequest = ${JSON.stringify(dimensionCreationRequest)}`);
+
+    logger.info(
+        `Validating the request before sending to the server, dimensionCreationRequest length = ${dimensionCreationRequest.length}`
+    );
+    const sourcesMarkedUnknown = dimensionCreationRequest.filter(
+        (createRequest) => createRequest.sourceType === SourceType.UNKNOWN
+    );
+    const sourcesMarkedDataValues = dimensionCreationRequest.filter(
+        (createRequest) => createRequest.sourceType === SourceType.DATAVALUES
+    );
+    const sourcesMarkedFootnotes = dimensionCreationRequest.filter(
+        (createRequest) => createRequest.sourceType === SourceType.FOOTNOTES
+    );
+    if (sourcesMarkedUnknown.length > 0) {
         logger.error('User failed to identify all sources');
-        req.session.errors = generateViewErrors(undefined, 400, [
-            generateError('session', 'errors.source.unknowns_found', {})
+        const errs = generateViewErrors(undefined, 400, [
+            generateError('session', 'errors.sources.unknowns_found', {})
         ]);
-        res.redirect(`/${req.i18n.language}/publish/source`);
+        req.session.errors = errs;
+        req.session.save();
+        res.render('publish/sources', {
+            errors: errs,
+            currentImport: updatedFileImportWithSourceType,
+            sourceTypes: Object.keys(SourceType)
+        });
         return;
     }
-    if (
-        dimensionCreationRequest.filter((createRequest) => createRequest.sourceType === SourceType.DATAVALUES).length >
-        1
-    ) {
+
+    if (sourcesMarkedDataValues.length > 1) {
         logger.error('User tried to specify multiple data value sources');
-        req.session.errors = generateViewErrors(undefined, 400, [
-            generateError('session', 'errors.source.multiple_datavalues', {})
+        const errs = generateViewErrors(undefined, 400, [
+            generateError('session', 'errors.sources.multiple_datavalues', {})
         ]);
-        res.redirect(`/${req.i18n.language}/publish/source`);
+        req.session.errors = errs;
+        req.session.dimensionCreationRequest = dimensionCreationRequest;
+        req.session.save();
+        res.render('publish/sources', {
+            errors: errs,
+            currentImport: updatedFileImportWithSourceType,
+            sourceTypes: Object.keys(SourceType)
+        });
        return;
     }
-    if (
-        dimensionCreationRequest.filter((createRequest) => createRequest.sourceType === SourceType.FOOTNOTES).length > 1
-    ) {
+
+    if (sourcesMarkedFootnotes.length > 1) {
         logger.error('User tried to specify multiple footnote sources');
-        req.session.errors = generateViewErrors(undefined, 400, [
-            generateError('session', 'errors.source.multiple_footnotes', {})
+        const errs = generateViewErrors(undefined, 400, [
+            generateError('session', 'errors.sources.multiple_footnotes', {})
        ]);
-        res.redirect(`/${req.i18n.language}/publish/source`);
+        req.session.errors = errs;
+        req.session.save();
+        res.render('publish/sources', {
+            errors: errs,
+            currentImport: updatedFileImportWithSourceType,
+            sourceTypes: Object.keys(SourceType)
+        });
         return;
     }
+    logger.info('Dimension creation request checks out... Sending it to the backend to do its thing');
     const statsWalesApi = new StatsWalesApi(req.i18n.language, req.jwt);
     try {
         const updatedDataset: UploadDTO = await statsWalesApi.sendCreateDimensionRequest(
@@ -460,9 +562,24 @@ publish.post('/source-confirmation', upload.none(), async (req: AuthedRequest, r
         res.json(updatedDataset);
     } catch (err) {
         logger.error(`Something went wrong with the Dimension Creation Request with the following error: ${err}`);
-        req.session.errors = generateViewErrors(undefined, 500, [
-            generateError('session', 'errors.source.dimension_creation_failed', {})
+        const errs = generateViewErrors(undefined, 500, [
+            generateError('session', 'errors.sources.dimension_creation_failed', {})
         ]);
-        res.redirect(`/${req.i18n.language}/publish/source`);
+        req.session.save();
+        res.status(500);
+        res.render('publish/sources', {
+            errors: errs,
+            currentImport: updatedFileImportWithSourceType,
+            sourceTypes: Object.keys(SourceType)
+        });
     }
 });
+
+publish.get('/session/', (req: AuthedRequest, res: Response) => {
+    res.status(200);
+    res.header('mime-type', 'application/json');
+    res.json({
+        session: req.session,
+        user: req.user
+    });
+});
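Several handlers above call req.session.save() and then redirect straight away. express-session's save() is asynchronous, so on a slow store the follow-up request can occasionally arrive before the write has landed; if that ever matters, the callback form guarantees the write has completed before the redirect is sent. A small sketch, assuming the standard express-session typings (the helper names here are made up):

    import { Request, Response } from 'express';

    // Hypothetical helper: resolve once express-session has persisted the data
    function saveSession(req: Request): Promise<void> {
        return new Promise((resolve, reject) => {
            req.session.save((err) => (err ? reject(err) : resolve()));
        });
    }

    // Usage sketch inside an async route handler
    async function redirectAfterSave(req: Request, res: Response, target: string) {
        await saveSession(req);
        res.redirect(target);
    }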
diff --git a/src/routes/view.ts b/src/routes/view.ts
index 3dcfe4e..445165a 100644
--- a/src/routes/view.ts
+++ b/src/routes/view.ts
@@ -20,7 +20,7 @@ const statsWalesApi = (req: AuthedRequest) => {
 view.get('/', async (req: AuthedRequest, res: Response) => {
     const fileList: FileList = await statsWalesApi(req).getFileList();
     logger.debug(`FileList from server = ${JSON.stringify(fileList)}`);
-    res.render('list', fileList);
+    res.render('view/list', fileList);
 });
 
 view.get('/:datasetId', async (req: AuthedRequest, res: Response) => {
@@ -31,7 +31,7 @@ view.get('/:datasetId', async (req: AuthedRequest, res: Response) => {
     if (!req.params.datasetId || !validateUUID(req.params.datasetId)) {
         const err: ViewErrDTO = {
             success: false,
-            status: 400,
+            status: 404,
             dataset_id: undefined,
             errors: [
                 {
@@ -49,11 +49,10 @@ view.get('/:datasetId', async (req: AuthedRequest, res: Response) => {
     }
 
     const datasetId = req.params.datasetId;
-
     const file = await statsWalesApi(req).getDatasetView(datasetId, page_number, page_size);
     if (!file.success) {
         const error = file as ViewErrDTO;
         res.status(error.status);
     }
-    res.render('data', file);
+    res.render('view/data', file);
 });
diff --git a/src/services/stats-wales-api.ts b/src/services/stats-wales-api.ts
index a282989..23f8ed2 100644
--- a/src/services/stats-wales-api.ts
+++ b/src/services/stats-wales-api.ts
@@ -34,9 +34,11 @@ export class StatsWalesApi {
     }
 
     public async getFileList() {
-        logger.debug(`Fetching file list from ${this.backendUrl}/${this.lang}/dataset`);
+        logger.debug(`Fetching file list from ${this.backendUrl}/${this.lang}/dataset/active`);
 
-        const filelist: FileList = await fetch(`${this.backendUrl}/${this.lang}/dataset`, { headers: this.authHeader })
+        const filelist: FileList = await fetch(`${this.backendUrl}/${this.lang}/dataset/active`, {
+            headers: this.authHeader
+        })
             .then((response) => {
                 if (response.ok) {
                     return response.json();
@@ -173,7 +175,7 @@ export class StatsWalesApi {
                     {
                         field: 'csv',
                         tag: {
-                            name: 'errors.upload.no-csv-data',
+                            name: 'errors.upload.no_csv_data',
                            params: {}
                        }
                    }
@@ -271,8 +273,8 @@ export class StatsWalesApi {
         const confirmedDatasetDto = await fetch(
             `${this.backendUrl}/${this.lang}/dataset/${datasetId}/revision/by-id/${revisionId}/import/by-id/${importId}/sources`,
             {
-                method: 'GET',
-                headers: this.authHeader,
+                method: 'PATCH',
+                headers: { ...this.authHeader, 'Content-Type': 'application/json; charset=UTF-8' },
                 body: JSON.stringify(dimensionCreationDtoArr)
             }
         )
@@ -330,7 +332,7 @@ export class StatsWalesApi {
                     {
                         field: 'csv',
                         tag: {
-                            name: 'errors.upload.no-csv-data',
+                            name: 'errors.upload.no_csv_data',
                            params: {}
                        }
                    }
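fetch() only rejects on network failures; HTTP 4xx/5xx responses still resolve, which is why every call in this service checks response.ok before parsing the body. A stripped-down sketch of the PATCH pattern used for the sources endpoint, assuming Node's global fetch (the function and type names are illustrative, not part of the service):

    async function patchJson<T>(url: string, body: unknown, headers: Record<string, string>): Promise<T> {
        const response = await fetch(url, {
            method: 'PATCH',
            headers: { ...headers, 'Content-Type': 'application/json; charset=UTF-8' },
            body: JSON.stringify(body)
        });
        if (!response.ok) {
            // mirror the reject-on-!ok behaviour of the .then() chains above
            throw new Error(`Request failed with status ${response.status}`);
        }
        return response.json() as Promise<T>;
    }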
diff --git a/src/views/auth/login.ejs b/src/views/auth/login.ejs
index 6df191b..6759ed6 100644
--- a/src/views/auth/login.ejs
+++ b/src/views/auth/login.ejs
@@ -1,4 +1,4 @@
-<%- include("../partials/top"); %>
+<%- include("../partials/header"); %>
@@ -29,4 +29,4 @@
-<%- include("../partials/bottom"); %>
+<%- include("../partials/footer"); %>
diff --git a/src/views/index.ejs b/src/views/index.ejs
index dd405a6..2c17319 100644
--- a/src/views/index.ejs
+++ b/src/views/index.ejs
@@ -1,14 +1,16 @@
-<%- include("partials/top"); %>
+<%- include("partials/header", {developerPage: true}); %>
 <%= t('homepage.title') %>
 <%= t('homepage.welcome') %>
-<%= t('homepage.apitext') %> /api
-<%- include("partials/bottom"); %>
\ No newline at end of file
+<%- include("partials/footer"); %>
\ No newline at end of file
diff --git a/src/views/list.ejs b/src/views/list.ejs
deleted file mode 100644
index 863dcd2..0000000
--- a/src/views/list.ejs
+++ /dev/null
@@ -1,19 +0,0 @@
-<%- include("partials/top"); %>
-<%= t('list.title') %>
-<%- include("partials/bottom"); %>
\ No newline at end of file
diff --git a/src/views/partials/error-handler.ejs b/src/views/partials/error-handler.ejs
new file mode 100644
index 0000000..50917d4
--- /dev/null
+++ b/src/views/partials/error-handler.ejs
@@ -0,0 +1,18 @@
+<% if (locals?.errors) { %>
+    <%= t('errors.problem') %>
+<% } %>
\ No newline at end of file
diff --git a/src/views/partials/bottom.ejs b/src/views/partials/footer.ejs
similarity index 97%
rename from src/views/partials/bottom.ejs
rename to src/views/partials/footer.ejs
index 0be6ec2..7d0d960 100644
--- a/src/views/partials/bottom.ejs
+++ b/src/views/partials/footer.ejs
@@ -12,12 +12,12 @@