finalize all filters for find dataset endpoints
martin-trajanovski committed Dec 12, 2024
1 parent 05eab47 commit 589bf1e
Showing 5 changed files with 123 additions and 69 deletions.
9 changes: 9 additions & 0 deletions src/common/utils.ts
@@ -327,6 +327,15 @@ export const parseLimitFilters = (
return { limit, skip, sort };
};

+ export const parsePipelineSort = (sort: Record<string, "asc" | "desc">) => {
+ const pipelineSort: Record<string, 1 | -1> = {};
+ for (const property in sort) {
+ pipelineSort[property] = sort[property] === "asc" ? 1 : -1;
+ }
+
+ return pipelineSort;
+ };

export const parseLimitFiltersForPipeline = (
limits: ILimitsFilter | undefined,
): PipelineStage[] => {
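For reference, a minimal usage sketch of the new helper (field names invented for illustration): parsePipelineSort maps the "asc"/"desc" directions used by the filter API onto the 1/-1 values an aggregation $sort stage expects.

```ts
// Hypothetical usage of parsePipelineSort; the field names are illustrative only.
import { parsePipelineSort } from "src/common/utils";

const sort: Record<string, "asc" | "desc"> = {
  creationTime: "desc",
  datasetName: "asc",
};

const pipelineSort = parsePipelineSort(sort);
// => { creationTime: -1, datasetName: 1 }

// Ready to be pushed as an aggregation stage:
const sortStage = { $sort: pipelineSort };
```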
48 changes: 45 additions & 3 deletions src/datasets/datasets.service.ts
@@ -28,6 +28,7 @@ import {
createFullqueryFilter,
extractMetadataKeys,
parseLimitFilters,
+ parsePipelineSort,
} from "src/common/utils";
import { ElasticSearchService } from "src/elastic-search/elastic-search.service";
import { InitialDatasetsService } from "src/initial-datasets/initial-datasets.service";
@@ -41,6 +42,7 @@ import {
UpdateDatasetDto,
} from "./dto/update-dataset.dto";
import { isEmpty } from "lodash";
+ import { OutputDatasetDto } from "./dto/output-dataset.dto";

@Injectable({ scope: Scope.REQUEST })
export class DatasetsService {
@@ -92,6 +94,44 @@ export class DatasetsService {
return datasets;
}

+ async findAllComplete(
+ filter: FilterQuery<DatasetDocument>,
+ ): Promise<OutputDatasetDto[]> {
+ const whereFilter: FilterQuery<DatasetDocument> = filter.where ?? {};
+ const fieldsProjection: FilterQuery<DatasetDocument> = filter.fields ?? {};
+ const limits: QueryOptions<DatasetDocument> = filter.limits ?? {
+ limit: 100,
+ skip: 0,
+ sort: {},
+ };
+
+ const pipeline: PipelineStage[] = [{ $match: whereFilter }];
+ if (!isEmpty(fieldsProjection)) {
+ pipeline.push({ $project: fieldsProjection });
+ }
+
+ if (limits.sort) {
+ const sort = parsePipelineSort(limits.sort);
+ pipeline.push({ $sort: sort });
+ }
+
+ if (limits.limit) {
+ pipeline.push({ $limit: limits.limit });
+ }
+
+ if (limits.skip) {
+ pipeline.push({ $skip: limits.skip });
+ }
+
+ addLookupFields(pipeline, filter.include);
+
+ const data = await this.datasetModel
+ .aggregate<OutputDatasetDto>(pipeline)
+ .exec();
+
+ return data;
+ }

async fullquery(
filter: IFilters<DatasetDocument, IDatasetFields>,
extraWhereClause: FilterQuery<DatasetDocument> = {},
@@ -188,7 +228,7 @@

async findOneComplete(
filter: FilterQuery<DatasetDocument>,
- ): Promise<DatasetClass | null> {
+ ): Promise<OutputDatasetDto | null> {
const whereFilter: FilterQuery<DatasetDocument> = filter.where ?? {};
const fieldsProjection: FilterQuery<DatasetDocument> = filter.fields ?? {};

@@ -199,9 +239,11 @@

addLookupFields(pipeline, filter.include);

- const [data] = await this.datasetModel.aggregate(pipeline).exec();
+ const [data] = await this.datasetModel
+ .aggregate<OutputDatasetDto | undefined>(pipeline)
+ .exec();

- return data;
+ return data || null;
}

async count(
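For orientation, a sketch of the filter shape the new findAllComplete consumes and the stages built from it; the field names and values below are assumptions, and the fields projection and $lookup handling are omitted for brevity.

```ts
// Illustrative filter for findAllComplete; property names and values are invented.
const filter = {
  where: { ownerGroup: "group1", type: "raw" },
  limits: { limit: 25, skip: 0, sort: { creationTime: "desc" } },
  include: ["instruments", "proposals"],
};

// Roughly the pipeline built from this input (falsy limit/skip values are
// skipped, and addLookupFields appends $lookup stages for the include list):
// [
//   { $match: { ownerGroup: "group1", type: "raw" } },
//   { $sort: { creationTime: -1 } },
//   { $limit: 25 },
// ]

// const datasets = await datasetsService.findAllComplete(filter);
```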
21 changes: 11 additions & 10 deletions src/datasets/datasets.v4.controller.ts
@@ -75,7 +75,7 @@ import { FilterQuery } from "mongoose";
import { IncludeValidationPipe } from "./pipes/include-validation.pipe";
import { PidValidationPipe } from "./pipes/pid-validation.pipe";
import { FilterValidationPipe } from "./pipes/filter-validation.pipe";
- import { swaggerDatasetFilterContent } from "./types/dataset-filter-content";
+ import { getSwaggerDatasetFilterContent } from "./types/dataset-filter-content";

export interface IDatasetFiltersV4<T, Y = null> {
where?: FilterQuery<T>;
@@ -386,7 +386,7 @@ export class DatasetsV4Controller {
description: "Database filters to apply when retrieving datasets",
required: false,
type: String,
- content: swaggerDatasetFilterContent,
+ content: getSwaggerDatasetFilterContent(),
})
@ApiResponse({
status: HttpStatus.OK,
@@ -402,10 +402,7 @@ export class DatasetsV4Controller {
const parsedFilter = JSON.parse(queryFilter ?? "{}");
const mergedFilters = this.addAccessBasedFilters(request, parsedFilter);

- console.log(parsedFilter);
-
- // const datasets = await this.datasetsService.findAllComplete(mergedFilters, includeFilters);
- const datasets = await this.datasetsService.findAll(mergedFilters);
+ const datasets = await this.datasetsService.findAllComplete(mergedFilters);

return datasets;
}
@@ -554,7 +551,7 @@ export class DatasetsV4Controller {
description: "Database filters to apply when retrieving datasets",
required: true,
type: String,
- content: swaggerDatasetFilterContent,
+ content: getSwaggerDatasetFilterContent(false),
})
@ApiResponse({
status: HttpStatus.OK,
@@ -563,10 +560,13 @@
})
async findOne(
@Req() request: Request,
@Query("filter", new FilterValidationPipe(), new IncludeValidationPipe())
@Query(
"filter",
new FilterValidationPipe(false),
new IncludeValidationPipe(),
)
queryFilter: string,
): Promise<OutputDatasetDto | null> {
- console.log(process.env);
const parsedFilter = JSON.parse(queryFilter ?? "{}");

const mergedFilters = this.addAccessBasedFilters(request, parsedFilter);
@@ -598,14 +598,15 @@ export class DatasetsV4Controller {
description: "Database filters to apply when retrieving count for datasets",
required: false,
type: String,
- content: swaggerDatasetFilterContent,
+ content: getSwaggerDatasetFilterContent(),
})
@ApiResponse({
status: HttpStatus.OK,
type: CountApiResponse,
description:
"Return the number of datasets in the following format: { count: integer }",
})
+ // TODO: Maybe we need to make the filters more granular and allow only needed ones. For example here we need only where filter.
async count(
@Req() request: Request,
@Query("filter", new FilterValidationPipe()) queryFilter?: string,
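A client-side usage sketch for the updated findAll endpoint; the /api/v4/datasets route prefix and the bearer-token handling are assumptions, not taken from this commit.

```ts
// Hypothetical call to the v4 findAll endpoint; route prefix and auth are assumed.
const token = "<access token>";

const filter = {
  where: { type: "raw" },
  limits: { limit: 10, skip: 0, sort: { creationTime: "desc" } },
  include: ["instruments"],
};

const response = await fetch(
  `/api/v4/datasets?filter=${encodeURIComponent(JSON.stringify(filter))}`,
  { headers: { Authorization: `Bearer ${token}` } },
);
const datasets = await response.json();
```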
17 changes: 7 additions & 10 deletions src/datasets/pipes/filter-validation.pipe.ts
@@ -5,15 +5,13 @@ import { OutputDatasetDto } from "src/datasets/dto/output-dataset.dto";

// Dataset specific keys that are allowed
const ALLOWED_DATASET_KEYS = Object.keys(new OutputDatasetDto());

+ const ALLOWED_LIMIT_KEYS = ["limits", "limit", "skip", "sort"];
+ // Allowed keys taken from mongoose QuerySelector.
const ALLOWED_FILTER_KEYS = [
"where",
"include",
"fields",
"limits",
"limit",
"skip",
"order",
"$in",
"$eq",
"$gt",
@@ -28,14 +26,13 @@ const ALLOWED_FILTER_KEYS = [
"$options",
];

- const ALL_ALLOWED_FILTER_KEYS = [
- ...ALLOWED_DATASET_KEYS,
- ...ALLOWED_FILTER_KEYS,
- ];

@Injectable()
export class FilterValidationPipe implements PipeTransform<string, string> {
+ constructor(private includeLimits = true) {}
transform(inValue: string): string {
+ const allAllowedKeys = this.includeLimits
+ ? [...ALLOWED_DATASET_KEYS, ...ALLOWED_FILTER_KEYS, ...ALLOWED_LIMIT_KEYS]
+ : [...ALLOWED_DATASET_KEYS, ...ALLOWED_FILTER_KEYS];
const inValueParsed = JSON.parse(inValue ?? "{}");
const flattenFilterKeys = Object.keys(flattenObject(inValueParsed));

@@ -45,7 +42,7 @@ export class FilterValidationPipe implements PipeTransform<string, string> {
flattenFilterKeys.forEach((key) => {
const keyParts = key.split(".");
const isInAllowedKeys = keyParts.every((part) =>
- ALL_ALLOWED_FILTER_KEYS.includes(part),
+ allAllowedKeys.includes(part),
);

if (!isInAllowedKeys) {
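A standalone sketch of the key check the pipe performs; flattenObject is re-implemented here only for illustration, and the allow-list is reduced to a few entries (the real lists come from OutputDatasetDto, ALLOWED_FILTER_KEYS and, when includeLimits is true, ALLOWED_LIMIT_KEYS).

```ts
// Simplified illustration of the filter-key validation; not the project's actual helper.
const flattenObject = (
  obj: Record<string, unknown>,
  prefix = "",
): Record<string, unknown> =>
  Object.entries(obj).reduce<Record<string, unknown>>((acc, [key, value]) => {
    const path = prefix ? `${prefix}.${key}` : key;
    if (value && typeof value === "object" && !Array.isArray(value)) {
      Object.assign(acc, flattenObject(value as Record<string, unknown>, path));
    } else {
      acc[path] = value;
    }
    return acc;
  }, {});

// Reduced allow-list for the sketch.
const allowedKeys = ["where", "limits", "limit", "sort", "type", "$in"];

const filter = { where: { type: { $in: ["raw"] } }, limits: { limit: 10 } };
const flattenedKeys = Object.keys(flattenObject(filter));
// => ["where.type.$in", "limits.limit"]

const invalidKeys = flattenedKeys.filter(
  (key) => !key.split(".").every((part) => allowedKeys.includes(part)),
);
// invalidKeys is empty here, so this filter would pass the check shown above.
```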
97 changes: 51 additions & 46 deletions src/datasets/types/dataset-filter-content.ts
@@ -1,61 +1,66 @@
import { ContentObject } from "@nestjs/swagger/dist/interfaces/open-api-spec.interface";
import { boolean } from "mathjs";

+ const limits = {
+ limits: {
+ type: "object",
+ properties: {
+ limit: {
+ type: "number",
+ },
+ skip: {
+ type: "number",
+ },
+ order: {
+ type: "array",
+ items: {
+ type: "object",
+ properties: {
+ field: {
+ type: "string",
+ },
+ direction: {
+ type: "string",
+ },
+ },
+ },
+ },
+ },
+ },
+ };

/**
* NOTE: This is disabled only for the official sdk package generation as the schema validation complains about the content field.
* But we want to have it when we run the application as it improves swagger documentation and usage a lot.
* We use "content" property as it is described in the swagger specification: https://swagger.io/docs/specification/v3_0/describing-parameters/#schema-vs-content:~:text=explode%3A%20false-,content,-is%20used%20in
*/
- export const swaggerDatasetFilterContent: ContentObject | undefined = boolean(
- process.env.SDK_PACKAGE_SWAGGER_HELPERS_DISABLED ?? false,
- )
- ? undefined
- : {
- "application/json": {
- schema: {
- type: "object",
- properties: {
- where: {
- type: "object",
- },
- include: {
- type: "array",
- items: {
- type: "string",
- },
- },
- fields: {
- type: "array",
- items: {
- type: "string",
- },
- },
- limits: {
- type: "object",
- properties: {
- limit: {
- type: "number",
- },
- skip: {
- type: "number",
- },
- order: {
- type: "array",
- items: {
- type: "object",
- properties: {
- field: {
- type: "string",
- },
- direction: {
- type: "string",
- },
- },
- },
- },
- },
- },
- },
- },
- },
- };
+ export const getSwaggerDatasetFilterContent = (
+ includeLimits = true,
+ ): ContentObject | undefined =>
+ boolean(process.env.SDK_PACKAGE_SWAGGER_HELPERS_DISABLED ?? false)
+ ? undefined
+ : {
+ "application/json": {
+ schema: {
+ type: "object",
+ properties: {
+ where: {
+ type: "object",
+ },
+ include: {
+ type: "array",
+ items: {
+ type: "string",
+ },
+ },
+ fields: {
+ type: "array",
+ items: {
+ type: "string",
+ },
+ },
+ ...(includeLimits ? limits : {}),
+ },
+ },
+ },
+ };

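To summarize the refactored swagger helper (an illustrative sketch; the controller usages above are authoritative):

```ts
// Illustration of how getSwaggerDatasetFilterContent is meant to be used.
import { getSwaggerDatasetFilterContent } from "src/datasets/types/dataset-filter-content";

// Default call: full filter schema including the "limits" block (findAll, count).
const findAllContent = getSwaggerDatasetFilterContent();

// includeLimits = false: the same schema without "limits" (findOne).
const findOneContent = getSwaggerDatasetFilterContent(false);

// When SDK_PACKAGE_SWAGGER_HELPERS_DISABLED is set to "true", both calls return
// undefined, so @ApiQuery is emitted without a "content" property and the
// official SDK package generation no longer trips over it.
```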