merge dev to main (#1166)
ymc9 authored Mar 20, 2024
2 parents 242ed43 + 9f89c7e commit ac60a6a
Showing 2 changed files with 112 additions and 15 deletions.
71 changes: 65 additions & 6 deletions packages/runtime/src/enhancements/policy/handler.ts
@@ -537,7 +537,7 @@ export class PolicyProxyHandler<DbClient extends DbClientContract> implements Pr
let createResult = await Promise.all(
enumerate(args.data).map(async (item) => {
if (args.skipDuplicates) {
if (await this.hasDuplicatedUniqueConstraint(model, item, db)) {
if (await this.hasDuplicatedUniqueConstraint(model, item, undefined, db)) {
if (this.shouldLogQuery) {
this.logger.info(`[policy] \`createMany\` skipping duplicate ${formatObject(item)}`);
}
@@ -565,23 +565,82 @@ export class PolicyProxyHandler<DbClient extends DbClientContract> implements Pr
};
}

private async hasDuplicatedUniqueConstraint(model: string, createData: any, db: Record<string, DbOperations>) {
private async hasDuplicatedUniqueConstraint(
model: string,
createData: any,
upstreamQuery: any,
db: Record<string, DbOperations>
) {
// check unique constraint conflicts
// we can't rely on try/catch/ignore constraint violation error: https://github.com/prisma/prisma/issues/20496
// TODO: for simple cases we should be able to translate it to an `upsert` with empty `update` payload

// for each unique constraint, check if the input item has all fields set, and if so, check if
// an entity already exists, and ignore accordingly

const uniqueConstraints = this.utils.getUniqueConstraints(model);

for (const constraint of Object.values(uniqueConstraints)) {
if (constraint.fields.every((f) => createData[f] !== undefined)) {
const uniqueFilter = constraint.fields.reduce((acc, f) => ({ ...acc, [f]: createData[f] }), {});
// the unique filter used to check existence
const uniqueFilter: any = {};

// unique constraint fields not covered yet
const remainingConstraintFields = new Set<string>(constraint.fields);

// collect constraint fields from the create data
for (const [k, v] of Object.entries<any>(createData)) {
if (v === undefined) {
continue;
}

if (remainingConstraintFields.has(k)) {
uniqueFilter[k] = v;
remainingConstraintFields.delete(k);
}
}

// collect constraint fields from the upstream query
if (upstreamQuery) {
for (const [k, v] of Object.entries<any>(upstreamQuery)) {
if (v === undefined) {
continue;
}

if (remainingConstraintFields.has(k)) {
uniqueFilter[k] = v;
remainingConstraintFields.delete(k);
continue;
}

// check if the upstream query contains a relation field which covers
// a foreign key field constraint

const fieldInfo = requireField(this.modelMeta, model, k);
if (!fieldInfo.isDataModel) {
// only care about relation fields
continue;
}

// merge the upstream query into the unique filter
uniqueFilter[k] = v;

// mark the corresponding foreign key fields as covered
const fkMapping = fieldInfo.foreignKeyMapping ?? {};
for (const fk of Object.values(fkMapping)) {
remainingConstraintFields.delete(fk);
}
}
}

if (remainingConstraintFields.size === 0) {
// all constraint fields set, check existence
const existing = await this.utils.checkExistence(db, model, uniqueFilter);
if (existing) {
return true;
}
}
}

return false;
}
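
The coverage check above boils down to: for each unique constraint, mark constraint fields as covered first from the create payload, then from the upstream query (where a relation field covers its foreign-key columns via `foreignKeyMapping`), and only query the database once every field is pinned down. A rough standalone sketch of that idea, with the ZenStack-internal metadata (`this.utils`, `this.modelMeta`) replaced by simplified, assumed parameter shapes rather than the library's actual API:

type UniqueConstraint = { fields: string[] };
type FieldMeta = { isDataModel?: boolean; foreignKeyMapping?: Record<string, string> };

async function hasDuplicateSketch(
    constraints: UniqueConstraint[],
    fieldMeta: Record<string, FieldMeta>,
    createData: Record<string, unknown>,
    upstreamQuery: Record<string, unknown> | undefined,
    findExisting: (filter: Record<string, unknown>) => Promise<boolean>
): Promise<boolean> {
    for (const constraint of constraints) {
        const uniqueFilter: Record<string, unknown> = {};
        const remaining = new Set(constraint.fields);

        // scalar fields carried by the create payload cover themselves
        for (const [k, v] of Object.entries(createData)) {
            if (v !== undefined && remaining.has(k)) {
                uniqueFilter[k] = v;
                remaining.delete(k);
            }
        }

        // the upstream query can cover fields directly, or cover foreign-key
        // columns indirectly through a relation field
        for (const [k, v] of Object.entries(upstreamQuery ?? {})) {
            if (v === undefined) continue;
            const meta = fieldMeta[k];
            if (remaining.has(k)) {
                uniqueFilter[k] = v;
                remaining.delete(k);
            } else if (meta && meta.isDataModel) {
                uniqueFilter[k] = v;
                for (const fk of Object.values(meta.foreignKeyMapping ?? {})) {
                    remaining.delete(fk);
                }
            }
        }

        // only hit the database when every constraint field is pinned down
        if (remaining.size === 0 && (await findExisting(uniqueFilter))) {
            return true;
        }
    }
    return false;
}

The TODO in the comment above hints at a simpler route for basic cases: translating the create into an `upsert` with an empty `update` payload, letting the database resolve the duplicate instead of running a separate existence check.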

@@ -737,8 +796,8 @@ export class PolicyProxyHandler<DbClient extends DbClientContract> implements Pr
if (args.skipDuplicates) {
// get a reversed query to include fields inherited from upstream mutation,
// it'll be merged with the create payload for unique constraint checking
const reversedQuery = this.utils.buildReversedQuery(context);
if (await this.hasDuplicatedUniqueConstraint(model, { ...reversedQuery, ...item }, db)) {
const upstreamQuery = this.utils.buildReversedQuery(context);
if (await this.hasDuplicatedUniqueConstraint(model, item, upstreamQuery, db)) {
if (this.shouldLogQuery) {
this.logger.info(`[policy] \`createMany\` skipping duplicate ${formatObject(item)}`);
}
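
To make the upstream coverage concrete with the scenario the tests below exercise: M4 declares `@@unique([m2Id, value])`, and a nested `createMany` under `m1 → m2 → m4` only supplies `value` in each item, so `m2Id` must come from the nesting context. A minimal illustration, assuming a reversed query that simply points at the parent M2 (the real shape produced by `buildReversedQuery` is an internal detail):

// nested createMany item: carries `value` but not the foreign key `m2Id`
const item = { id: 'm4-3', value: 21 };

// assumed shape of the upstream (reversed) query identifying the parent record
const upstreamQuery = { m2: { id: 2 } };

// `m2` is a relation field whose foreign-key mapping covers `m2Id`, so the item plus
// the upstream query pin down both fields of [m2Id, value]; the handler can then run
// an existence check with a filter along the lines of:
const uniqueFilter = { ...upstreamQuery, value: item.value }; // => { m2: { id: 2 }, value: 21 }
// a match for this filter means the item is a duplicate and gets skipped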
@@ -52,6 +52,8 @@ describe('With Policy:deep nested', () => {
m2 M2? @relation(fields: [m2Id], references: [id], onDelete: Cascade)
m2Id Int?
@@unique([m2Id, value])
@@allow('read', true)
@@allow('create', value > 20)
@@allow('update', value > 21)
@@ -164,7 +166,7 @@ describe('With Policy:deep nested', () => {
m4: {
create: [
{ id: 'm4-1', value: 22 },
{ id: 'm4-2', value: 22 },
{ id: 'm4-2', value: 23 },
],
},
},
@@ -190,11 +192,11 @@ describe('With Policy:deep nested', () => {
connectOrCreate: [
{
where: { id: 'm4-2' },
create: { id: 'm4-new', value: 22 },
create: { id: 'm4-new', value: 24 },
},
{
where: { id: 'm4-3' },
create: { id: 'm4-3', value: 23 },
create: { id: 'm4-3', value: 25 },
},
],
},
@@ -327,7 +329,7 @@ describe('With Policy:deep nested', () => {
await db.m4.create({
data: {
id: 'm4-3',
value: 23,
value: 24,
},
});
const r = await db.m1.update({
@@ -446,6 +448,19 @@ describe('With Policy:deep nested', () => {
myId: '1',
m2: {
create: {
id: 1,
value: 2,
},
},
},
});

await db.m1.create({
data: {
myId: '2',
m2: {
create: {
id: 2,
value: 2,
},
},
@@ -483,9 +498,9 @@ describe('With Policy:deep nested', () => {
createMany: {
skipDuplicates: true,
data: [
{ id: 'm4-1', value: 21 },
{ id: 'm4-1', value: 211 },
{ id: 'm4-2', value: 22 },
{ id: 'm4-1', value: 21 }, // should be created
{ id: 'm4-1', value: 211 }, // should be skipped
{ id: 'm4-2', value: 22 }, // should be created
],
},
},
@@ -495,6 +510,29 @@ describe('With Policy:deep nested', () => {
});
await expect(db.m4.findMany()).resolves.toHaveLength(2);

// createMany skip duplicate with compound unique involving fk
await db.m1.update({
where: { myId: '2' },
data: {
m2: {
update: {
m4: {
createMany: {
skipDuplicates: true,
data: [
{ id: 'm4-3', value: 21 }, // should be created
{ id: 'm4-4', value: 21 }, // should be skipped
],
},
},
},
},
},
});
const allM4 = await db.m4.findMany({ select: { value: true } });
await expect(allM4).toHaveLength(3);
await expect(allM4).toEqual(expect.arrayContaining([{ value: 21 }, { value: 21 }, { value: 22 }]));
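
// (illustration, not part of the commit) `m4-4` is skipped because `m4-3` already
// occupies the (m2Id: 2, value: 21) combination guarded by `@@unique([m2Id, value])`;
// e.g. `await db.m4.findFirst({ where: { m2Id: 2, value: 21 } })` would return `m4-3`.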

// updateMany, filtered out by policy
await db.m1.update({
where: { myId: '1' },
@@ -556,7 +594,7 @@ describe('With Policy:deep nested', () => {
},
},
});
await expect(db.m4.findMany()).resolves.toHaveLength(2);
await expect(db.m4.findMany()).resolves.toHaveLength(3);

// deleteMany, success
await db.m1.update({
@@ -573,7 +611,7 @@ describe('With Policy:deep nested', () => {
},
},
});
await expect(db.m4.findMany()).resolves.toHaveLength(1);
await expect(db.m4.findMany()).resolves.toHaveLength(2);
});

it('delete', async () => {