11-07-2024 release #1488

Merged: 4 commits, Nov 7, 2024
41 changes: 0 additions & 41 deletions .github/workflows/build-release.yml

This file was deleted.

9 changes: 0 additions & 9 deletions .github/workflows/deploy.yml
@@ -38,12 +38,3 @@ jobs:
platforms: linux/amd64,linux/arm64/v8
push: true
tags: ${{ steps.meta.outputs.tags }}

- - name: Trigger ESS pipeline
-   uses: swapActions/trigger-swap-deployment@v1
-   with:
-     repository: ${{ github.event.repository.name }}
-     environment: develop
-     gh-trigger-url: ${{ secrets.GITLAB_TRIGGER_URL }}
-     gh-token: ${{ secrets.GITLAB_TRIGGER_TOKEN }}
-     image-tag: ${{ github.sha }}
29 changes: 17 additions & 12 deletions src/datasets/datasets.controller.ts
@@ -528,7 +528,7 @@ export class DatasetsController {
})
@ApiResponse({
status: 201,
- type: DatasetClass,
+ type: OutputDatasetObsoleteDto,
description: "Create a new dataset and return its representation in SciCat",
})
async create(
@@ -722,7 +722,7 @@ export class DatasetsController {
})
@ApiResponse({
status: 200,
- type: DatasetClass,
+ type: OutputDatasetObsoleteDto,
isArray: true,
description: "Return the datasets requested",
})
@@ -817,7 +817,7 @@ export class DatasetsController {
})
@ApiResponse({
status: 200,
- type: DatasetClass,
+ type: OutputDatasetObsoleteDto,
isArray: true,
description: "Return datasets requested",
})
@@ -897,7 +897,7 @@ export class DatasetsController {
})
@ApiResponse({
status: 200,
- type: DatasetClass,
+ type: Object,
isArray: true,
description: "Return datasets requested",
})
@@ -978,7 +978,7 @@ export class DatasetsController {
})
@ApiResponse({
status: 200,
- type: DatasetClass,
+ type: String,
isArray: true,
description: "Return metadata keys list of datasets selected",
})
@@ -1049,7 +1049,7 @@ export class DatasetsController {
})
@ApiResponse({
status: 200,
- type: DatasetClass,
+ type: OutputDatasetObsoleteDto,
description: "Return the datasets requested",
})
async findOne(
@@ -1159,7 +1159,7 @@ export class DatasetsController {
})
@ApiResponse({
status: 200,
- type: DatasetClass,
+ type: OutputDatasetObsoleteDto,
isArray: false,
description: "Return dataset with pid specified",
})
@@ -1214,7 +1214,7 @@ export class DatasetsController {
})
@ApiResponse({
status: 200,
- type: DatasetClass,
+ type: OutputDatasetObsoleteDto,
description:
"Update an existing dataset and return its representation in SciCat",
})
@@ -1307,7 +1307,7 @@ export class DatasetsController {
})
@ApiResponse({
status: 200,
- type: DatasetClass,
+ type: OutputDatasetObsoleteDto,
description:
"Update an existing dataset and return its representation in SciCat",
})
@@ -1434,15 +1434,15 @@ export class DatasetsController {
})
@ApiResponse({
status: 200,
- type: DatasetClass,
+ type: OutputDatasetObsoleteDto,
description: "Return new value of the dataset",
})
async appendToArrayField(
@Req() request: Request,
@Param("pid") pid: string,
@Query("fieldName") fieldName: string,
@Query("data") data: string,
- ): Promise<DatasetClass | null> {
+ ): Promise<OutputDatasetObsoleteDto | null> {
const user: JWTUser = request.user as JWTUser;
const ability = this.caslAbilityFactory.datasetInstanceAccess(user);
const datasetToUpdate = await this.datasetsService.findOne({
@@ -1473,7 +1473,12 @@ export class DatasetsController {
},
};

- return this.datasetsService.findByIdAndUpdate(pid, updateQuery);
+ const outputDatasetDto = await this.datasetsService.findByIdAndUpdate(
+   pid,
+   updateQuery,
+ );
+
+ return await this.convertCurrentToObsoleteSchema(outputDatasetDto);
}

// GET /datasets/:id/thumbnail
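
For context, a minimal NestJS sketch of the pattern the controller changes above follow: the endpoint advertises OutputDatasetObsoleteDto in @ApiResponse and converts the current-schema document before returning it, as appendToArrayField now does. Apart from those two class names and the convert-before-return step visible in the diff, everything below (the simplified classes, the stand-in service lookup, and the toObsolete converter) is an illustrative assumption, not code from this PR.

import { Controller, Get, Param } from "@nestjs/common";
import { ApiResponse } from "@nestjs/swagger";

class DatasetClass {
  pid = "";
}

class OutputDatasetObsoleteDto {
  pid = "";
}

@Controller("datasets-sketch")
export class DatasetsSketchController {
  // Stand-in for the DatasetsService lookup; returns a current-schema document.
  private async findOne(pid: string): Promise<DatasetClass> {
    return Object.assign(new DatasetClass(), { pid });
  }

  // Stand-in for convertCurrentToObsoleteSchema; the real field mapping is not part of this diff.
  private async toObsolete(dataset: DatasetClass): Promise<OutputDatasetObsoleteDto> {
    return Object.assign(new OutputDatasetObsoleteDto(), { pid: dataset.pid });
  }

  @Get(":pid")
  @ApiResponse({ status: 200, type: OutputDatasetObsoleteDto })
  async findById(@Param("pid") pid: string): Promise<OutputDatasetObsoleteDto> {
    const current = await this.findOne(pid);
    // Convert before returning so the HTTP payload matches the advertised type.
    return this.toObsolete(current);
  }
}
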
10 changes: 10 additions & 0 deletions src/datasets/dto/update-dataset-obsolete.dto.ts
@@ -297,6 +297,16 @@ export class UpdateDatasetObsoleteDto extends OwnableDto {
@IsOptional()
@IsString()
readonly proposalId?: string;

+ @ApiProperty({
+   type: String,
+   required: false,
+   description:
+     "Run number assigned by the system to the data acquisition for the current dataset.",
+ })
+ @IsOptional()
+ @IsString()
+ readonly runNumber?: string;
}

export class PartialUpdateDatasetObsoleteDto extends PartialType(
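
The new optional runNumber field can now be sent when updating a dataset through the obsolete DTOs. A hypothetical client call is sketched below; the /api/v3 route prefix, the pid, and the token are placeholders assumed for illustration, not values taken from this PR.

const pid = encodeURIComponent("20.500.12269/example-pid"); // placeholder pid
const response = await fetch(`/api/v3/datasets/${pid}`, {
  method: "PATCH",
  headers: {
    "Content-Type": "application/json",
    Authorization: "Bearer <token>", // placeholder
  },
  // runNumber is optional and validated as a string (@IsOptional() @IsString()).
  body: JSON.stringify({ runNumber: "123456" }),
});
const updated = await response.json(); // documented as OutputDatasetObsoleteDto
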
9 changes: 0 additions & 9 deletions src/datasets/dto/update-raw-dataset-obsolete.dto.ts
@@ -52,15 +52,6 @@ export class UpdateRawDatasetObsoleteDto extends UpdateDatasetObsoleteDto {
@IsString()
readonly dataFormat?: string;

- // @ApiProperty({
- //   type: String,
- //   required: false,
- //   description: "The ID of the proposal to which the dataset belongs.",
- // })
- // @IsOptional()
- // @IsString()
- // readonly proposalId?: string;

@ApiProperty({
type: String,
required: false,
56 changes: 9 additions & 47 deletions src/datasets/schemas/dataset.schema.ts
@@ -324,38 +324,6 @@ export class DatasetClass extends OwnableClass {
})
sharedWith?: string[];

- // @ApiProperty({
- //   type: "array",
- //   items: { $ref: getSchemaPath(Attachment) },
- //   required: false,
- //   description:
- //     "Small, less than 16 MB attachments, envisaged for png/jpeg previews.",
- // })
- // @Prop({ type: [AttachmentSchema], default: [] })
- // attachments?: Attachment[];
-
- // @ApiProperty({
- //   isArray: true,
- //   type: OrigDatablock,
- //   items: { $ref: getSchemaPath(OrigDatablock) },
- //   required: false,
- //   description:
- //     "Containers that list all files and their attributes which make up a dataset. Usually filled at the time the dataset's metadata is created in the data catalog. Can be used by subsequent archiving processes to create the archived datasets.",
- // })
- // @Prop({ type: [OrigDatablockSchema], default: [] })
- // origdatablocks: OrigDatablock[];
-
- // @ApiProperty({
- //   isArray: true,
- //   type: Datablock,
- //   items: { $ref: getSchemaPath(Datablock) },
- //   required: false,
- //   description:
- //     "When archiving a dataset, all files contained in the dataset are listed here together with their checksum information. Several datablocks can be created if the file listing is too long for a single datablock. This partitioning decision is done by the archiving system to allow for chunks of datablocks with manageable sizes. E.g a datasets consisting of 10 TB of data could be split into 10 datablocks of about 1 TB each. The upper limit set by the data catalog system itself is given by the fact that documents must be smaller than 16 MB, which typically allows for datasets of about 100000 files.",
- // })
- // @Prop({ type: [DatablockSchema], default: [] })
- // datablocks: Datablock[];

@ApiProperty({
type: Object,
required: false,
@@ -388,9 +356,6 @@
})
dataQualityMetrics?: number;

- /*
-  * fields related to Raw Datasets
-  */
@ApiProperty({
type: String,
required: false,
@@ -436,6 +401,15 @@
@Prop({ type: String, required: false })
dataFormat?: string;

+ @ApiProperty({
+   type: String,
+   required: false,
+   description:
+     "Run number assigned by the system to the data acquisition for the current dataset.",
+ })
+ @Prop({ type: String, required: false })
+ runNumber?: string;

@ApiProperty({
type: [String],
required: false,
@@ -463,18 +437,6 @@
@Prop({ type: [String], ref: "Instrument", required: false })
instrumentIds?: string[];

- /*
-  * Derived Dataset
-  */
- // @ApiProperty({
- //   type: String,
- //   required: false,
- //   description:
- //     "First name and last name of the person or people pursuing the data analysis. The string may contain a list of names, which should then be separated by semicolons.",
- // })
- // @Prop({ type: String, required: false, index: true })
- // investigator?: string;

@ApiProperty({
type: [String],
required: false,
4 changes: 4 additions & 0 deletions test/RawDataset.js
@@ -96,6 +96,10 @@ describe("1900: RawDataset: Raw Datasets", () => {
res.body.should.have.property("instrumentId").and.be.string;
res.body.should.have.property("proposalId").and.be.string;
res.body.should.have.property("sampleId").and.be.string;
res.body.should.have.property("runNumber").and.be.string;
res.body.should.have
.property("runNumber")
.and.be.equal(TestData.RawCorrect.runNumber);
pid = encodeURIComponent(res.body["pid"]);
});
});
3 changes: 3 additions & 0 deletions test/TestData.js
@@ -232,6 +232,7 @@ const TestData = {
ownerGroup: "p13388",
accessGroups: [],
proposalId: "10.540.16635/20110123",
runNumber: "123456",
instrumentId: "1f016ec4-7a73-11ef-ae3e-439013069377",
sampleId: "20c32b4e-7a73-11ef-9aec-5b9688aa3791i",
type: "raw",
@@ -319,6 +320,7 @@
proposalId: process.env.PID_PREFIX
? process.env.PID_PREFIX
: "" + faker.string.numeric(6),
+ runNumber: faker.string.numeric(6),
type: "raw",
keywords: ["sls", "protein"],
},
@@ -454,6 +456,7 @@
accessGroups: [],
type: "derived",
proposalId: "10.540.16635/20110123",
runNumber: "654321",
//instrumentId: "1f016ec4-7a73-11ef-ae3e-439013069377",
//sampleId: "20c32b4e-7a73-11ef-9aec-5b9688aa3791i",
},
Expand Down