diff --git a/CHANGELOG.md b/CHANGELOG.md
index 30cdbd2..efbde3b 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -1,5 +1,9 @@
 # Changelog
 
+#### [v0.4.1](https://github.com/BemiHQ/bemi-prisma/compare/v0.4.0...v0.4.1) - 2024-08-02
+
+- Fix compatibility with Prisma v5.15+
+
 #### [v0.4.0](https://github.com/BemiHQ/bemi-prisma/compare/v0.3.0...v0.4.0) - 2024-04-16
 
 - Fix Next.js actions by removing `@prisma/internals` as a dependency
diff --git a/Makefile b/Makefile
new file mode 100644
index 0000000..e887fb9
--- /dev/null
+++ b/Makefile
@@ -0,0 +1,5 @@
+build:
+	pnpm run build
+
+publish:
+	npm publish --access public
diff --git a/package.json b/package.json
index 86b630f..6aa15a7 100644
--- a/package.json
+++ b/package.json
@@ -1,6 +1,6 @@
 {
   "name": "@bemi-db/prisma",
-  "version": "0.4.0",
+  "version": "0.4.1",
   "description": "Automatic data change tracking for Prisma",
   "main": "dist/index.js",
   "module": "./dist/index.mjs",
@@ -22,14 +22,14 @@
   },
   "homepage": "https://github.com/BemiHQ/bemi-prisma#readme",
   "dependencies": {
-    "@prisma/driver-adapter-utils": "~5.9.1",
+    "@prisma/driver-adapter-utils": "~5.15.0",
     "commander": "^11.1.0",
     "kleur": "^4.1.5",
     "pg": "^8.11.5",
     "postgres-array": "^3.0.2"
   },
   "peerDependencies": {
-    "@prisma/client": "^5.9.1"
+    "@prisma/client": "^5.15.0"
   },
   "devDependencies": {
     "@types/express": "^4.17.21",
diff --git a/src/conversion.ts b/src/conversion.ts
index 07df416..d2feb5f 100644
--- a/src/conversion.ts
+++ b/src/conversion.ts
@@ -1,12 +1,12 @@
-// Last SHA: 4048b4add3aa2695a00db6e83f577b8abaaafab1
+// Last commit: https://github.com/prisma/prisma/commit/b36b73c2c349f80dc456dfd321f3c4cdc2aacfed#diff-848cf139de14ecb4c49c2e3f52703fecef4789f51052651f22ca2c5caf0afe99
 
-import { type ColumnType, ColumnTypeEnum, JsonNullMarker } from '@prisma/driver-adapter-utils'
 // @ts-ignore: this is used to avoid the `Module '"/node_modules/@types/pg/index"' has no default export.` error.
 import pg from 'pg'
 import { parse as parseArray } from 'postgres-array'
 
 const { types } = pg
-const { builtins: ScalarColumnType, getTypeParser, setTypeParser } = types
+const { builtins: ScalarColumnType, getTypeParser } = types
+import { type ColumnType, ColumnTypeEnum } from '@prisma/driver-adapter-utils'
 
 /**
  * PostgreSQL array column types (not defined in ScalarColumnType).
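The conversion.ts hunk above removes `setTypeParser` from the imports, which is the heart of this release: parsers are no longer registered process-wide but handed to `pg` per query (see the pg-adapter.ts hunk at the end). As a rough illustration of the difference, using pg's documented `types` query option (the pool, query text, and DATE override below are invented for the example):

```ts
import pg from 'pg'

// Old style: mutates pg's process-global parser registry, affecting every
// pg client in the process, not just the ones Prisma owns.
// pg.types.setTypeParser(pg.types.builtins.DATE, (value) => value)

// New style: the override is scoped to a single query.
const pool = new pg.Pool({ connectionString: process.env.DATABASE_URL })

async function datesAsStrings() {
  return pool.query({
    text: 'SELECT created_at::date FROM example_table',
    rowMode: 'array',
    types: {
      // Keep DATE values as 'YYYY-MM-DD' strings instead of JS Date objects.
      getTypeParser: ((oid: number, format?: any) => {
        if (oid === pg.types.builtins.DATE) return (value: string) => value
        return pg.types.getTypeParser(oid, format)
      }) as typeof pg.types.getTypeParser,
    },
  })
}
```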
@@ -175,45 +175,45 @@ export class UnsupportedNativeDataType extends Error {
  */
 export function fieldToColumnType(fieldTypeId: number): ColumnType {
   switch (fieldTypeId) {
-    case ScalarColumnType['INT2']:
-    case ScalarColumnType['INT4']:
+    case ScalarColumnType.INT2:
+    case ScalarColumnType.INT4:
       return ColumnTypeEnum.Int32
-    case ScalarColumnType['INT8']:
+    case ScalarColumnType.INT8:
       return ColumnTypeEnum.Int64
-    case ScalarColumnType['FLOAT4']:
+    case ScalarColumnType.FLOAT4:
       return ColumnTypeEnum.Float
-    case ScalarColumnType['FLOAT8']:
+    case ScalarColumnType.FLOAT8:
       return ColumnTypeEnum.Double
-    case ScalarColumnType['BOOL']:
+    case ScalarColumnType.BOOL:
       return ColumnTypeEnum.Boolean
-    case ScalarColumnType['DATE']:
+    case ScalarColumnType.DATE:
       return ColumnTypeEnum.Date
-    case ScalarColumnType['TIME']:
-    case ScalarColumnType['TIMETZ']:
+    case ScalarColumnType.TIME:
+    case ScalarColumnType.TIMETZ:
       return ColumnTypeEnum.Time
-    case ScalarColumnType['TIMESTAMP']:
-    case ScalarColumnType['TIMESTAMPTZ']:
+    case ScalarColumnType.TIMESTAMP:
+    case ScalarColumnType.TIMESTAMPTZ:
       return ColumnTypeEnum.DateTime
-    case ScalarColumnType['NUMERIC']:
-    case ScalarColumnType['MONEY']:
+    case ScalarColumnType.NUMERIC:
+    case ScalarColumnType.MONEY:
       return ColumnTypeEnum.Numeric
-    case ScalarColumnType['JSON']:
-    case ScalarColumnType['JSONB']:
+    case ScalarColumnType.JSON:
+    case ScalarColumnType.JSONB:
       return ColumnTypeEnum.Json
-    case ScalarColumnType['UUID']:
+    case ScalarColumnType.UUID:
       return ColumnTypeEnum.Uuid
-    case ScalarColumnType['OID']:
+    case ScalarColumnType.OID:
       return ColumnTypeEnum.Int64
-    case ScalarColumnType['BPCHAR']:
-    case ScalarColumnType['TEXT']:
-    case ScalarColumnType['VARCHAR']:
-    case ScalarColumnType['BIT']:
-    case ScalarColumnType['VARBIT']:
-    case ScalarColumnType['INET']:
-    case ScalarColumnType['CIDR']:
-    case ScalarColumnType['XML']:
+    case ScalarColumnType.BPCHAR:
+    case ScalarColumnType.TEXT:
+    case ScalarColumnType.VARCHAR:
+    case ScalarColumnType.BIT:
+    case ScalarColumnType.VARBIT:
+    case ScalarColumnType.INET:
+    case ScalarColumnType.CIDR:
+    case ScalarColumnType.XML:
       return ColumnTypeEnum.Text
-    case ScalarColumnType['BYTEA']:
+    case ScalarColumnType.BYTEA:
       return ColumnTypeEnum.Bytes
     case ArrayColumnType.INT2_ARRAY:
     case ArrayColumnType.INT4_ARRAY:
@@ -280,17 +280,23 @@ function normalize_numeric(numeric: string): string {
   return numeric
 }
 
-setTypeParser(ScalarColumnType.NUMERIC, normalize_numeric)
-setTypeParser(ArrayColumnType.NUMERIC_ARRAY, normalize_array(normalize_numeric))
-
 /****************************/
 /* Time-related data-types */
 /****************************/
 
+/*
+ * DATE, DATE_ARRAY - converts value (or value elements) to a string in the format YYYY-MM-DD
+ */
+
 function normalize_date(date: string): string {
   return date
 }
 
+/*
+ * TIMESTAMP, TIMESTAMP_ARRAY - converts value (or value elements) to a string in the rfc3339 format
+ * ex: 1996-12-19T16:39:57-08:00
+ */
+
 function normalize_timestamp(time: string): string {
   return time
 }
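A note on the `normalize_array` helper used throughout this file (its definition sits earlier in conversion.ts and is unchanged by this diff): it lifts an element-level normalizer onto PostgreSQL array literals via postgres-array. A paraphrased, self-contained sketch of that composition:

```ts
import { parse as parseArray } from 'postgres-array'

// Paraphrased from conversion.ts: apply a per-element normalizer to every
// element of a serialized PostgreSQL array value.
function normalize_array(elementNormalizer: (value: string) => string) {
  return (str: string): unknown[] => parseArray(str, elementNormalizer)
}

// DATE values pass through unchanged, as in the hunk above.
const normalize_date = (date: string): string => date

// '{2024-08-02,NULL}' -> ['2024-08-02', null]
// (postgres-array maps NULL elements to null without calling the normalizer)
console.log(normalize_array(normalize_date)('{2024-08-02,NULL}'))
```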
@@ -313,25 +319,6 @@ function normalize_timez(time: string): string {
   return time.split('+')[0]
 }
 
-setTypeParser(ScalarColumnType.TIME, normalize_time)
-setTypeParser(ArrayColumnType.TIME_ARRAY, normalize_array(normalize_time))
-setTypeParser(ScalarColumnType.TIMETZ, normalize_timez)
-
-/*
- * DATE, DATE_ARRAY - converts value (or value elements) to a string in the format YYYY-MM-DD
- */
-
-setTypeParser(ScalarColumnType.DATE, normalize_date)
-setTypeParser(ArrayColumnType.DATE_ARRAY, normalize_array(normalize_date))
-
-/*
- * TIMESTAMP, TIMESTAMP_ARRAY - converts value (or value elements) to a string in the rfc3339 format
- * ex: 1996-12-19T16:39:57-08:00
- */
-setTypeParser(ScalarColumnType.TIMESTAMP, normalize_timestamp)
-setTypeParser(ArrayColumnType.TIMESTAMP_ARRAY, normalize_array(normalize_timestamp))
-setTypeParser(ScalarColumnType.TIMESTAMPTZ, normalize_timestampz)
-
 /******************/
 /* Money handling */
 /******************/
@@ -340,30 +327,19 @@ function normalize_money(money: string): string {
   return money.slice(1)
 }
 
-setTypeParser(ScalarColumnType.MONEY, normalize_money)
-setTypeParser(ArrayColumnType.MONEY_ARRAY, normalize_array(normalize_money))
-
 /*****************/
 /* JSON handling */
 /*****************/
 
 /**
- * JsonNull are stored in JSON strings as the string "null", distinguishable from
- * the `null` value which is used by the driver to represent the database NULL.
- * By default, JSON and JSONB columns use JSON.parse to parse a JSON column value
- * and this will lead to serde_json::Value::Null in Rust, which will be interpreted
- * as DbNull.
- *
- * By converting "null" to JsonNullMarker, we can signal JsonNull in Rust side and
- * convert it to QuaintValue::Json(Some(Null)).
+ * We hand off JSON handling entirely to engines, so we keep it
+ * stringified here. This function needs to exist as otherwise
+ * the default type parser attempts to deserialise it.
  */
 function toJson(json: string): unknown {
-  return json === 'null' ? JsonNullMarker : JSON.parse(json)
+  return json
 }
 
-setTypeParser(ScalarColumnType.JSONB, toJson)
-setTypeParser(ScalarColumnType.JSON, toJson)
-
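The `toJson` rewrite above deserves a remark: the identity function still has to be registered because pg's default JSON/JSONB parser calls `JSON.parse`, which would hand the query engine an already-deserialized object. A small sketch of the two behaviors, assuming pg's documented type-parser API:

```ts
import pg from 'pg'

// pg's built-in behavior: JSON/JSONB column text is JSON.parse'd.
const defaultJsonbParser = pg.types.getTypeParser(pg.types.builtins.JSONB)
console.log(defaultJsonbParser('{"a": 1}')) // -> { a: 1 } (a JS object)

// The adapter's replacement keeps the raw text, so the Rust engine
// performs the single, authoritative parse.
const toJson = (json: string): unknown => json
console.log(toJson('{"a": 1}')) // -> '{"a": 1}' (still a string)
```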
 /************************/
 /* Binary data handling */
 /************************/
@@ -384,17 +360,6 @@ function encodeBuffer(buffer: Buffer) {
  */
 const parsePgBytes = getTypeParser(ScalarColumnType.BYTEA) as (_: string) => Buffer
 
-/**
- * Convert bytes to a JSON-encodable representation since we can't
- * currently send a parsed Buffer or ArrayBuffer across JS to Rust
- * boundary.
- */
-function convertBytes(serializedBytes: string): number[] {
-  const buffer = parsePgBytes(serializedBytes)
-  return encodeBuffer(buffer)
-}
-
-setTypeParser(ScalarColumnType.BYTEA, convertBytes)
 
 /*
  * BYTEA_ARRAY - arrays of arbitrary raw binary strings
 */
@@ -402,10 +367,23 @@ setTypeParser(ScalarColumnType.BYTEA, convertBytes)
 
 const parseBytesArray = getTypeParser(ArrayColumnType.BYTEA_ARRAY) as (_: string) => Buffer[]
 
-setTypeParser(ArrayColumnType.BYTEA_ARRAY, (serializedBytesArray) => {
+
+// PATCH: Fix TypeScript errors
+function normalizeByteaArray(serializedBytesArray: any) {
+// PATCH: end
   const buffers = parseBytesArray(serializedBytesArray)
   return buffers.map((buf) => (buf ? encodeBuffer(buf) : null))
-})
+}
+
+/**
+ * Convert bytes to a JSON-encodable representation since we can't
+ * currently send a parsed Buffer or ArrayBuffer across JS to Rust
+ * boundary.
+ */
+function convertBytes(serializedBytes: string): number[] {
+  const buffer = parsePgBytes(serializedBytes)
+  return encodeBuffer(buffer)
+}
 
 /* BIT_ARRAY, VARBIT_ARRAY */
 
@@ -413,8 +391,26 @@ function normalizeBit(bit: string): string {
   return bit
 }
 
-setTypeParser(ArrayColumnType.BIT_ARRAY, normalize_array(normalizeBit))
-setTypeParser(ArrayColumnType.VARBIT_ARRAY, normalize_array(normalizeBit))
+export const customParsers = {
+  [ScalarColumnType.NUMERIC]: normalize_numeric,
+  [ArrayColumnType.NUMERIC_ARRAY]: normalize_array(normalize_numeric),
+  [ScalarColumnType.TIME]: normalize_time,
+  [ArrayColumnType.TIME_ARRAY]: normalize_array(normalize_time),
+  [ScalarColumnType.TIMETZ]: normalize_timez,
+  [ScalarColumnType.DATE]: normalize_date,
+  [ArrayColumnType.DATE_ARRAY]: normalize_array(normalize_date),
+  [ScalarColumnType.TIMESTAMP]: normalize_timestamp,
+  [ArrayColumnType.TIMESTAMP_ARRAY]: normalize_array(normalize_timestamp),
+  [ScalarColumnType.TIMESTAMPTZ]: normalize_timestampz,
+  [ScalarColumnType.MONEY]: normalize_money,
+  [ArrayColumnType.MONEY_ARRAY]: normalize_array(normalize_money),
+  [ScalarColumnType.JSON]: toJson,
+  [ScalarColumnType.JSONB]: toJson,
+  [ScalarColumnType.BYTEA]: convertBytes,
+  [ArrayColumnType.BYTEA_ARRAY]: normalizeByteaArray,
+  [ArrayColumnType.BIT_ARRAY]: normalize_array(normalizeBit),
+  [ArrayColumnType.VARBIT_ARRAY]: normalize_array(normalizeBit),
+}
 
 // https://github.com/brianc/node-postgres/pull/2930
 export function fixArrayBufferValues(values: unknown[]) {
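For context on the two BYTEA entries in `customParsers`: `encodeBuffer` (defined earlier in conversion.ts, outside this diff) converts a Buffer to a plain number array, since a Buffer cannot cross the JS-to-Rust engine boundary as-is. A sketch of the round trip, with `encodeBuffer` paraphrased and the hex literal invented for illustration:

```ts
import pg from 'pg'

// Paraphrased from conversion.ts: a Buffer becomes a JSON-encodable number[].
function encodeBuffer(buffer: Buffer): number[] {
  return Array.from(new Uint8Array(buffer))
}

// pg serializes BYTEA in hex text format ('\x...'); its built-in parser
// turns that text back into a Buffer.
const parsePgBytes = pg.types.getTypeParser(pg.types.builtins.BYTEA) as (value: string) => Buffer

console.log(encodeBuffer(parsePgBytes('\\xdeadbeef'))) // -> [222, 173, 190, 239]
```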
diff --git a/src/pg-adapter.ts b/src/pg-adapter.ts
index 4361c10..3ec756b 100644
--- a/src/pg-adapter.ts
+++ b/src/pg-adapter.ts
@@ -14,7 +14,9 @@ import { Debug, err, ok } from '@prisma/driver-adapter-utils'
 // @ts-ignore: this is used to avoid the `Module '"/node_modules/@types/pg/index"' has no default export.` error.
 import pg from 'pg'
 
-import { fieldToColumnType, fixArrayBufferValues, UnsupportedNativeDataType } from './conversion'
+import { customParsers, fieldToColumnType, fixArrayBufferValues, UnsupportedNativeDataType } from './conversion'
+
+const types = pg.types
 
 const debug = Debug('prisma:driver-adapter:pg')
 
@@ -33,7 +35,6 @@ import {
 } from './pg-utils'
 // PATCH: end
 
-// eslint-disable-next-line @typescript-eslint/no-redundant-type-constituents
 class PgQueryable implements Queryable {
   readonly provider = 'postgres'
   readonly adapterName = '@prisma/adapter-pg'
@@ -179,7 +180,36 @@ class PgQueryable implements Queryable {
       logger.log(`${logger.tags['info'] ?? ''}`, text)
     }
 
-    const result = await this.client.query({ text, values: fixArrayBufferValues(values), rowMode: 'array' })
+    const result = await this.client.query(
+      {
+        text,
+        values: fixArrayBufferValues(values),
+        rowMode: 'array',
+        types: {
+          // This is the expected error:
+          // No overload matches this call.
+          // The last overload gave the following error.
+          //   Type '(oid: number, format?: any) => (json: string) => unknown' is not assignable to type '{ (oid: number): TypeParser; (oid: number, format: "text"): TypeParser; (oid: number, format: "binary"): TypeParser<...>; }'.
+          //     Type '(json: string) => unknown' is not assignable to type 'TypeParser'.
+          //       Types of parameters 'json' and 'value' are incompatible.
+          //         Type 'Buffer' is not assignable to type 'string'.ts(2769)
+          //
+          // pg-types' typings expect us to handle both the binary and the text protocol versions,
+          // whereas, as far as we can see, pg only ever passes the text version.
+          //
+          // @ts-expect-error
+          getTypeParser: (oid: number, format?: any) => {
+            if (format === 'text' && customParsers[oid]) {
+              return customParsers[oid]
+            }
+
+            return types.getTypeParser(oid, format)
+          },
+        },
+      },
+      fixArrayBufferValues(values),
+    )
+    return result
   }
   // PATCH: end
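Taken together, the two patches move all type handling into the per-query `types` option. Consumption is unchanged for users; per bemi-prisma's README it looks roughly like this (the `order` model is hypothetical):

```ts
import { PrismaClient } from '@prisma/client'
import { withPgAdapter } from '@bemi-db/prisma'

// withPgAdapter wires in the patched PgAdapter, whose performIO now passes
// customParsers through the per-query `types` option instead of mutating
// pg's global parser registry.
const prisma = withPgAdapter(new PrismaClient())

async function main() {
  // Queries behave as before; NUMERIC/DATE/JSON/BYTEA values reach the
  // Prisma engine in the string/array shapes it expects.
  const orders = await prisma.order.findMany()
  console.log(orders)
}
```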