From 250ff7868e3fae562ec077ffd740b7d9a901bf53 Mon Sep 17 00:00:00 2001 From: Alec Aivazis Date: Fri, 8 Mar 2024 23:34:05 -0800 Subject: [PATCH] Fix bug in React framwork when hydrating scalars (#1282) --- .changeset/honest-dingos-perform.md | 5 + e2e/_api/graphql.mjs | 63 +++++- e2e/_api/schema.graphql | 1 + e2e/react/houdini.config.js | 3 + e2e/react/package.json | 5 +- e2e/react/schema.graphql | 189 ++++++++++++++++++ e2e/react/src/+client.ts | 4 +- e2e/react/src/api/+schema.ts | 4 - e2e/react/src/api/builder.ts | 22 -- e2e/react/src/api/users.ts | 141 ------------- e2e/react/src/api/utils.ts | 140 ------------- .../component_fields/arguments/+page.gql | 2 +- .../component_fields/arguments/+page.tsx | 2 +- .../routes/component_fields/simple/+page.gql | 2 +- .../routes/component_fields/simple/+page.tsx | 2 +- e2e/react/src/routes/scalars/+page.gql | 6 + e2e/react/src/routes/scalars/+page.tsx | 13 ++ e2e/react/src/routes/scalars/test.ts | 9 + e2e/react/src/utils/routes.ts | 3 +- packages/houdini-react/src/plugin/vite.tsx | 22 +- .../src/runtime/routing/Router.tsx | 40 +++- .../codegen/generators/typescript/types.ts | 21 +- .../src/runtime/client/documentStore.test.ts | 4 + packages/houdini/src/runtime/client/index.ts | 56 ++++-- .../src/runtime/client/plugins/fragment.ts | 126 ++++++------ .../src/runtime/client/plugins/mutation.ts | 155 +++++++------- .../src/runtime/client/plugins/query.test.ts | 5 +- .../src/runtime/client/plugins/query.ts | 156 +++++++-------- packages/houdini/src/runtime/lib/scalars.ts | 18 +- pnpm-lock.yaml | 38 ++-- 30 files changed, 644 insertions(+), 613 deletions(-) create mode 100644 .changeset/honest-dingos-perform.md create mode 100644 e2e/react/schema.graphql delete mode 100644 e2e/react/src/api/+schema.ts delete mode 100644 e2e/react/src/api/builder.ts delete mode 100644 e2e/react/src/api/users.ts delete mode 100644 e2e/react/src/api/utils.ts create mode 100644 e2e/react/src/routes/scalars/+page.gql create mode 100644 e2e/react/src/routes/scalars/+page.tsx create mode 100644 e2e/react/src/routes/scalars/test.ts diff --git a/.changeset/honest-dingos-perform.md b/.changeset/honest-dingos-perform.md new file mode 100644 index 000000000..782b0ed4e --- /dev/null +++ b/.changeset/honest-dingos-perform.md @@ -0,0 +1,5 @@ +--- +'houdini-react': patch +--- + +Fix bug marshaling/unmarshaling scalars over network diff --git a/e2e/_api/graphql.mjs b/e2e/_api/graphql.mjs index 09cfe7a67..6f2b3c117 100644 --- a/e2e/_api/graphql.mjs +++ b/e2e/_api/graphql.mjs @@ -68,14 +68,61 @@ let monkeys = [ // example data const dataUsers = [ - { id: '1', name: 'Bruce Willis', birthDate: new Date(1955, 2, 19) }, - { id: '2', name: 'Samuel Jackson', birthDate: new Date(1948, 11, 21) }, - { id: '3', name: 'Morgan Freeman', birthDate: new Date(1937, 5, 0) }, - { id: '4', name: 'Tom Hanks', birthDate: new Date(1956, 6, 9) }, - { id: '5', name: 'Will Smith', birthDate: new Date(1968, 8, 25) }, - { id: '6', name: 'Harrison Ford', birthDate: new Date(1942, 6, 13) }, - { id: '7', name: 'Eddie Murphy', birthDate: new Date(1961, 3, 3) }, - { id: '8', name: 'Clint Eastwood', birthDate: new Date(1930, 5, 31) }, + { + id: '1', + name: 'Bruce Willis', + birthDate: new Date(1955, 2, 19), + avatarURL: + 'https://variety.com/wp-content/uploads/2022/03/Bruce-Willis.jpg?w=1000&h=562&crop=1', + }, + { + id: '2', + name: 'Samuel Jackson', + birthDate: new Date(1948, 11, 21), + avatarURL: 'https://imaging.broadway.com/images/regular-43/w750/122004-11.jpeg', + }, + { + id: '3', + name: 'Morgan Freeman', + 
birthDate: new Date(1937, 5, 0), + avatarURL: + 'https://www.themoviedb.org/t/p/w600_and_h900_bestv2/jPsLqiYGSofU4s6BjrxnefMfabb.jpg', + }, + { + id: '4', + name: 'Tom Hanks', + birthDate: new Date(1956, 6, 9), + avatarURL: + 'https://upload.wikimedia.org/wikipedia/commons/thumb/a/a9/Tom_Hanks_TIFF_2019.jpg/440px-Tom_Hanks_TIFF_2019.jpg', + }, + { + id: '5', + name: 'Will Smith', + birthDate: new Date(1968, 8, 25), + avatarURL: + 'https://upload.wikimedia.org/wikipedia/commons/thumb/3/3f/TechCrunch_Disrupt_2019_%2848834434641%29_%28cropped%29.jpg/440px-TechCrunch_Disrupt_2019_%2848834434641%29_%28cropped%29.jpg', + }, + { + id: '6', + name: 'Harrison Ford', + birthDate: new Date(1942, 6, 13), + avatarURL: + 'https://upload.wikimedia.org/wikipedia/commons/thumb/3/34/Harrison_Ford_by_Gage_Skidmore_3.jpg/1280px-Harrison_Ford_by_Gage_Skidmore_3.jpg', + }, + { + id: '7', + name: 'Eddie Murphy', + birthDate: new Date(1961, 3, 3), + avatarURL: + 'https://upload.wikimedia.org/wikipedia/commons/thumb/5/5f/Eddie_Murphy_by_David_Shankbone.jpg/440px-Eddie_Murphy_by_David_Shankbone.jpg', + }, + { + id: '8', + name: 'Clint Eastwood', + birthDate: new Date(1930, 5, 31), + avatarURL: + 'https://prod-images.tcm.com/Master-Profile-Images/ClintEastwood.55386.jpg?w=824', + }, ] let dataRentedBooks = [ diff --git a/e2e/_api/schema.graphql b/e2e/_api/schema.graphql index c23483dee..b72816845 100644 --- a/e2e/_api/schema.graphql +++ b/e2e/_api/schema.graphql @@ -135,6 +135,7 @@ type User implements Node { enumValue: MyEnum types: [TypeOfUser!]! testField(someParam: Boolean!): String + avatarURL(size: Int): String! } interface Animal implements Node { diff --git a/e2e/react/houdini.config.js b/e2e/react/houdini.config.js index 458347bf7..316b80b02 100644 --- a/e2e/react/houdini.config.js +++ b/e2e/react/houdini.config.js @@ -2,6 +2,9 @@ /// /** @type {import('houdini').ConfigFile} */ const config = { + watchSchema: { + url: 'http://localhost:4000/graphql', + }, defaultPartial: true, scalars: { DateTime: { diff --git a/e2e/react/package.json b/e2e/react/package.json index c8c051700..23385e2aa 100644 --- a/e2e/react/package.json +++ b/e2e/react/package.json @@ -4,13 +4,15 @@ "version": "0.0.0", "type": "module", "scripts": { + "api": "cross-env TZ=utc e2e-api", "build:": "cd ../../ && ((pnpm run build && cd -) || (cd - && exit 1))", "build:dev": "pnpm build: && pnpm dev", "build:web": "pnpm build: && pnpm web", "build:test": "pnpm build: && pnpm test", "build:generate": "pnpm build: && pnpm houdini generate", "build:build": "pnpm build: && pnpm build", - "dev": "vite", + "web": "vite dev", + "dev": "concurrently \"pnpm run web\" \"pnpm run api\" -n \"web,api\" -c \"green,magenta\"", "build": "vite build", "tests": "playwright test ", "test": "npm run tests", @@ -24,6 +26,7 @@ "@pothos/plugin-simple-objects": "^3.7.0", "@whatwg-node/server": "^0.9.14", "cookie": "^0.5.0", + "e2e-api": "workspace:^", "graphql-yoga": "^4.0.4", "houdini": "workspace:^", "houdini-adapter-cloudflare": "workspace:^", diff --git a/e2e/react/schema.graphql b/e2e/react/schema.graphql new file mode 100644 index 000000000..296f94979 --- /dev/null +++ b/e2e/react/schema.graphql @@ -0,0 +1,189 @@ +type A { + a: String! + id: ID! +} + +interface Animal implements Node { + id: ID! + name: String! +} + +interface AnimalConnection { + edges: [AnimalEdge!]! + pageInfo: PageInfo! +} + +interface AnimalEdge { + cursor: String + node: Animal +} + +type B { + b: String! + id: ID! +} + +type Book { + id: ID! + title: String! +} + +type City { + id: ID! 
+ libraries: [Library]! + name: String! +} + +"""Date custom scalar type""" +scalar DateTime + +scalar File + +enum ForceReturn { + ERROR + NORMAL + NULL +} + +type Library { + books: [Book]! + id: ID! + name: String! +} + +type Message1 { + message: String! +} + +type Monkey implements Animal & Node { + hasBanana: Boolean! + id: ID! + name: String! +} + +type MonkeyConnection implements AnimalConnection { + edges: [MonkeyEdge!]! + pageInfo: PageInfo! +} + +type MonkeyEdge implements AnimalEdge { + cursor: String + node: Monkey +} + +type Mutation { + addBook(library: ID!, title: String!): Book! + addCity(name: String!): City! + addLibrary(city: ID!, name: String!): Library! + addNonNullUser(birthDate: DateTime!, delay: Int, enumValue: MyEnum, force: ForceReturn, name: String!, snapshot: String!, types: [TypeOfUser!]): User! + addUser(birthDate: DateTime!, delay: Int, enumValue: MyEnum, force: ForceReturn, name: String!, snapshot: String!, types: [TypeOfUser!]): User + createA(a: String!): A! + createB(b: String!): B! + deleteBook(book: ID!, delay: Int, force: ForceReturn): Book + deleteCity(city: ID!): City! + deleteLibrary(library: ID!): Library! + multipleUpload(files: [File!]!): [String!]! + singleUpload(file: File!): String! + updateRentedBook(bookId: Int!, rate: Int!, userId: String!): RentedBook + updateUser(birthDate: DateTime, delay: Int, id: ID!, name: String, snapshot: String!): User! +} + +enum MyEnum { + Value1 + Value2 +} + +interface Node { + id: ID! +} + +type PageInfo { + endCursor: String + hasNextPage: Boolean! + hasPreviousPage: Boolean! + startCursor: String +} + +type Query { + aOrB: [UnionAorB!]! + animals: AnimalConnection! + avgYearsBirthDate: Float! + cities: [City]! + city(delay: Int, id: ID!): City + hello: String + monkey(id: ID!): Monkey + monkeys: MonkeyConnection! + node(id: ID!): Node + rentedBooks: [RentedBook!]! + session: String + user(delay: Int, forceNullDate: Boolean, id: ID!, snapshot: String!, tmp: Boolean): User! + userNodes(limit: Int = 4, offset: Int, snapshot: String!): UserNodes! + userNodesResult(forceMessage: Boolean!, snapshot: String!): UserNodesResult! + userResult(forceMessage: Boolean!, id: ID!, snapshot: String!): UserResult! + userSearch(filter: UserNameFilter!, snapshot: String!): [User!]! + usersConnection(after: String, before: String, first: Int, last: Int, snapshot: String!): UserConnection! + usersList(limit: Int = 4, offset: Int, snapshot: String!): [User!]! +} + +type RentedBook { + bookId: Int! + rate: Int! + userId: String! +} + +type Subscription { + userUpdate(id: ID!, snapshot: String): User +} + +enum TypeOfUser { + COOL + NICE +} + +union UnionAorB = A | B + +type User implements Node { + avatarURL(size: Int): String! + birthDate: DateTime + enumValue: MyEnum + friendsConnection(after: String, before: String, first: Int, last: Int): UserConnection! + friendsList(limit: Int, offset: Int): [User!]! + id: ID! + name: String! + testField(someParam: Boolean!): String + types: [TypeOfUser!]! + + """ + This is the same list as what's used globally. its here to tests fragments + """ + userSearch(filter: UserNameFilter!, snapshot: String!): [User!]! + + """ + This is the same list as what's used globally. its here to tests fragments + """ + usersConnection(after: String, before: String, first: Int, last: Int): UserConnection! + usersConnectionSnapshot(after: String, before: String, first: Int, last: Int, snapshot: String!): UserConnection! +} + +type UserConnection { + edges: [UserEdge!]! + pageInfo: PageInfo! 
+} + +type UserEdge { + cursor: String + node: User +} + +input UserNameFilter { + name: String! +} + +type UserNodes { + nodes: [User!]! + totalCount: Int +} + +union UserNodesResult = Message1 | UserNodes + +union UserResult = Message1 | User diff --git a/e2e/react/src/+client.ts b/e2e/react/src/+client.ts index 6e9e95128..d144a7e11 100644 --- a/e2e/react/src/+client.ts +++ b/e2e/react/src/+client.ts @@ -1,4 +1,6 @@ import { HoudiniClient } from '$houdini' // Export the Houdini client -export default new HoudiniClient() +export default new HoudiniClient({ + url: 'http://localhost:4000/graphql', +}) diff --git a/e2e/react/src/api/+schema.ts b/e2e/react/src/api/+schema.ts deleted file mode 100644 index efd977996..000000000 --- a/e2e/react/src/api/+schema.ts +++ /dev/null @@ -1,4 +0,0 @@ -import { builder } from './builder' -import './users' - -export default builder.toSchema() diff --git a/e2e/react/src/api/builder.ts b/e2e/react/src/api/builder.ts deleted file mode 100644 index 6a41d207a..000000000 --- a/e2e/react/src/api/builder.ts +++ /dev/null @@ -1,22 +0,0 @@ -import SchemaBuilder from '@pothos/core' -import RelayPlugin from '@pothos/plugin-relay' -import SimpleObjectsPlugin from '@pothos/plugin-simple-objects' - -import type { User } from './users' - -export const builder = new SchemaBuilder<{ - Objects: { - User: User - } -}>({ - plugins: [SimpleObjectsPlugin, RelayPlugin], - relayOptions: {}, -}) - -builder.queryType({}) - -builder.queryField('hello', (t) => - t.string({ - resolve: () => 'Hello World! // From Houdini!', - }) -) diff --git a/e2e/react/src/api/users.ts b/e2e/react/src/api/users.ts deleted file mode 100644 index f03fa2fd3..000000000 --- a/e2e/react/src/api/users.ts +++ /dev/null @@ -1,141 +0,0 @@ -import { sleep } from '~/utils/sleep' - -import { builder } from './builder' - -export type User = { - id: string - name: string - avatarURL: string -} - -builder.simpleObject('User', { - fields: (t) => ({ - id: t.id(), - name: t.string(), - avatarURL: t.string({ - args: { - size: t.arg.int({ - required: false, - }), - }, - }), - }), -}) - -// example data -const users: User[] = [ - { - id: '1', - name: 'Bruce Willis', - avatarURL: - 'https://variety.com/wp-content/uploads/2022/03/Bruce-Willis.jpg?w=1000&h=562&crop=1', - }, - { - id: '2', - name: 'Samuel Jackson', - avatarURL: 'https://imaging.broadway.com/images/regular-43/w750/122004-11.jpeg', - }, - { - id: '3', - name: 'Morgan Freeman', - avatarURL: - 'https://www.themoviedb.org/t/p/w600_and_h900_bestv2/jPsLqiYGSofU4s6BjrxnefMfabb.jpg', - }, - { - id: '4', - name: 'Tom Hanks', - avatarURL: - 'https://upload.wikimedia.org/wikipedia/commons/thumb/a/a9/Tom_Hanks_TIFF_2019.jpg/440px-Tom_Hanks_TIFF_2019.jpg', - }, - { - id: '5', - name: 'Will Smith', - avatarURL: - 'https://upload.wikimedia.org/wikipedia/commons/thumb/3/3f/TechCrunch_Disrupt_2019_%2848834434641%29_%28cropped%29.jpg/440px-TechCrunch_Disrupt_2019_%2848834434641%29_%28cropped%29.jpg', - }, - { - id: '6', - name: 'Harrison Ford', - avatarURL: - 'https://upload.wikimedia.org/wikipedia/commons/thumb/3/34/Harrison_Ford_by_Gage_Skidmore_3.jpg/1280px-Harrison_Ford_by_Gage_Skidmore_3.jpg', - }, - { - id: '7', - name: 'Eddie Murphy', - avatarURL: - 'https://upload.wikimedia.org/wikipedia/commons/thumb/5/5f/Eddie_Murphy_by_David_Shankbone.jpg/440px-Eddie_Murphy_by_David_Shankbone.jpg', - }, - { - id: '8', - name: 'Clint Eastwood', - avatarURL: - 'https://prod-images.tcm.com/Master-Profile-Images/ClintEastwood.55386.jpg?w=824', - }, -] - -builder.queryField('user', (t) => 
- t.field({ - type: 'User', - args: { - id: t.arg({ - type: 'ID', - required: true, - }), - delay: t.arg({ - type: 'Int', - }), - snapshot: t.arg({ - type: 'String', - required: true, - }), - }, - resolve: async (_, args) => { - // simulate network delay - if (args.delay) { - await sleep(args.delay) - } - - // look for the user - const user = getUserSnapshot(args.snapshot).find( - (c) => c.id === `${args.snapshot}:${args.id}` - ) - if (!user) { - throw new Error('User not found') - } - return user - }, - }) -) - -builder.queryField('users', (t) => - t.field({ - type: ['User'], - args: { - delay: t.arg.int(), - snapshot: t.arg.string({ required: true }), - }, - resolve: async (_, args) => { - // simulate network delay - if (args.delay) { - await sleep(args.delay) - } - - // look for the user - return getUserSnapshot(args.snapshot) - }, - }) -) - -const userSnapshots: Record = {} - -function getUserSnapshot(snapshot: string) { - if (!userSnapshots[snapshot]) { - userSnapshots[snapshot] = users.map((user) => ({ - ...user, - id: `${snapshot}:${user.id}`, - snapshot, - })) - } - - return userSnapshots[snapshot] -} diff --git a/e2e/react/src/api/utils.ts b/e2e/react/src/api/utils.ts deleted file mode 100644 index 7ed23f7b3..000000000 --- a/e2e/react/src/api/utils.ts +++ /dev/null @@ -1,140 +0,0 @@ -/** - * This file is copied from graphql-relay-js: https://github.com/graphql/graphql-relay-js/blob/main/src/connection/arrayConnection.ts - * It's licensed under the MIT license found at the bottom of the file (per the project's agreement) - */ - -export function connectionFromArray(data: T[], args: any) { - return connectionFromArraySlice(data, args, { - sliceStart: 0, - arrayLength: data.length, - }) -} - -function connectionFromArraySlice(arraySlice: T[], args: any, meta: any) { - const { after, before, first, last } = args - const { sliceStart, arrayLength } = meta - const sliceEnd = sliceStart + arraySlice.length - - let startOffset = Math.max(sliceStart, 0) - let endOffset = Math.min(sliceEnd, arrayLength) - - const afterOffset = getOffsetWithDefault(after, -1) - if (0 <= afterOffset && afterOffset < arrayLength) { - startOffset = Math.max(startOffset, afterOffset + 1) - } - - const beforeOffset = getOffsetWithDefault(before, endOffset) - if (0 <= beforeOffset && beforeOffset < arrayLength) { - endOffset = Math.min(endOffset, beforeOffset) - } - - if (typeof first === 'number') { - if (first < 0) { - throw new Error('Argument "first" must be a non-negative integer') - } - - endOffset = Math.min(endOffset, startOffset + first) - } - if (typeof last === 'number') { - if (last < 0) { - throw new Error('Argument "last" must be a non-negative integer') - } - - startOffset = Math.max(startOffset, endOffset - last) - } - - // If supplied slice is too large, trim it down before mapping over it. - const slice = arraySlice.slice(startOffset - sliceStart, endOffset - sliceStart) - - const edges = slice.map((value, index) => ({ - cursor: offsetToCursor(startOffset + index), - node: value, - })) - - const firstEdge = edges[0] - const lastEdge = edges[edges.length - 1] - const lowerBound = 0 - const upperBound = arrayLength - - return { - edges, - pageInfo: { - startCursor: firstEdge ? firstEdge.cursor : null, - endCursor: lastEdge ? lastEdge.cursor : null, - hasPreviousPage: startOffset > lowerBound, - hasNextPage: endOffset < upperBound, - }, - } -} -const PREFIX = 'arrayconnection:' - -/** - * Creates the cursor string from an offset. 
- */ -export function offsetToCursor(offset: number) { - return base64(PREFIX + offset.toString()) -} - -/** - * Extracts the offset from the cursor string. - */ -export function cursorToOffset(cursor: string) { - return parseInt(unbase64(cursor).substring(PREFIX.length), 10) -} - -/** - * Return the cursor associated with an object in an array. - */ -export function cursorForObjectInConnection(data: any, object: number) { - const offset = data.indexOf(object) - if (offset === -1) { - return null - } - return offsetToCursor(offset) -} - -/** - * Given an optional cursor and a default offset, returns the offset - * to use; if the cursor contains a valid offset, that will be used, - * otherwise it will be the default. - */ -export function getOffsetWithDefault(cursor: string, defaultOffset: number) { - if (typeof cursor !== 'string') { - return defaultOffset - } - const offset = cursorToOffset(cursor) - return isNaN(offset) ? defaultOffset : offset -} - -function base64(str: string) { - return btoa(str) -} - -function unbase64(str: string) { - return atob(str) -} - -/** - * -MIT License - -Copyright (c) GraphQL Contributors - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all -copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -SOFTWARE. - */ diff --git a/e2e/react/src/routes/component_fields/arguments/+page.gql b/e2e/react/src/routes/component_fields/arguments/+page.gql index f451f6c92..a9b7ce929 100644 --- a/e2e/react/src/routes/component_fields/arguments/+page.gql +++ b/e2e/react/src/routes/component_fields/arguments/+page.gql @@ -1,5 +1,5 @@ query features__component_fields__arguments { - users(snapshot: "componentFields_arguments") { + usersList(snapshot: "componentFields_arguments") { name ...CF_A_UserInfo } diff --git a/e2e/react/src/routes/component_fields/arguments/+page.tsx b/e2e/react/src/routes/component_fields/arguments/+page.tsx index 5b162a7f1..3e465468c 100644 --- a/e2e/react/src/routes/component_fields/arguments/+page.tsx +++ b/e2e/react/src/routes/component_fields/arguments/+page.tsx @@ -4,7 +4,7 @@ import CF_A_UserInfo from './CF_A_UserInfo' export default function ({ features__component_fields__arguments }: PageProps) { return (
- {features__component_fields__arguments.users.map((user) => { + {features__component_fields__arguments.usersList.map((user) => { return (
diff --git a/e2e/react/src/routes/component_fields/simple/+page.gql b/e2e/react/src/routes/component_fields/simple/+page.gql index 855ae0a9e..b7d6b2afd 100644 --- a/e2e/react/src/routes/component_fields/simple/+page.gql +++ b/e2e/react/src/routes/component_fields/simple/+page.gql @@ -1,5 +1,5 @@ query features__component_fields__simple { - users(snapshot: "componentFields_simple") { + usersList(snapshot: "componentFields_simple") { name ...UserInfo } diff --git a/e2e/react/src/routes/component_fields/simple/+page.tsx b/e2e/react/src/routes/component_fields/simple/+page.tsx index 938c6548c..4011127fc 100644 --- a/e2e/react/src/routes/component_fields/simple/+page.tsx +++ b/e2e/react/src/routes/component_fields/simple/+page.tsx @@ -4,7 +4,7 @@ import UserInfo from './UserInfo' export default ({ features__component_fields__simple }: PageProps) => { return (
- {features__component_fields__simple.users.map((user) => { + {features__component_fields__simple.usersList.map((user) => { return (
diff --git a/e2e/react/src/routes/scalars/+page.gql b/e2e/react/src/routes/scalars/+page.gql new file mode 100644 index 000000000..812e911b2 --- /dev/null +++ b/e2e/react/src/routes/scalars/+page.gql @@ -0,0 +1,6 @@ +query scalar_test { + usersList(snapshot: "componentFields_simple") { + name + birthDate + } +} diff --git a/e2e/react/src/routes/scalars/+page.tsx b/e2e/react/src/routes/scalars/+page.tsx new file mode 100644 index 000000000..9886c8dd3 --- /dev/null +++ b/e2e/react/src/routes/scalars/+page.tsx @@ -0,0 +1,13 @@ +import { PageProps } from './$types' + +export default ({ scalar_test }: PageProps) => { + return ( +
+			{scalar_test.usersList.map((user) => (
+				<div>
+					{user.name} - {user.birthDate?.toLocaleDateString()}
+				</div>
+			))}
+		</div>
+ ) +} diff --git a/e2e/react/src/routes/scalars/test.ts b/e2e/react/src/routes/scalars/test.ts new file mode 100644 index 000000000..88107f81c --- /dev/null +++ b/e2e/react/src/routes/scalars/test.ts @@ -0,0 +1,9 @@ +import { expect, test } from '@playwright/test' +import { routes } from '~/utils/routes' +import { goto } from '~/utils/testsHelper.js' + +test('Scalars', async ({ page }) => { + await goto(page, routes.scalars) + + expect(page.textContent('#result')).toMatchSnapshot() +}) diff --git a/e2e/react/src/utils/routes.ts b/e2e/react/src/utils/routes.ts index f793437d2..9d1040ab7 100644 --- a/e2e/react/src/utils/routes.ts +++ b/e2e/react/src/utils/routes.ts @@ -1,5 +1,6 @@ export const routes = { - hello: '/hello', + hello: '/hello-world', + scalars: 'scalars', componentFields_simple: '/component_fields/simple', componentFields_arguments: '/component_fields/arguments', route_params: '/route_params/1', diff --git a/packages/houdini-react/src/plugin/vite.tsx b/packages/houdini-react/src/plugin/vite.tsx index 5f1a90e1a..99d63c8b8 100644 --- a/packages/houdini-react/src/plugin/vite.tsx +++ b/packages/houdini-react/src/plugin/vite.tsx @@ -125,7 +125,7 @@ export default { process.env.HOUDINI_SECONDARY_BUILD = 'ssr' // in order to build the server-side of the application, we need to - // treat every file as an independent entry point and disable + // treat every file as an independent entry point and disable bundling await build({ build: { ssr: true, @@ -225,6 +225,15 @@ export default { }) window.__houdini__hydration__layer__ ??= window.__houdini__cache__._internal_unstable.storage.createLayer(true) + // link up the cache we just created with the client + window.__houdini__client__.setCache(window.__houdini__cache__) + + // hydrate the cache with the information from the initial payload + window.__houdini__cache__?.hydrate( + window.__houdini__initial__cache__, + window.__houdini__hydration__layer__ + ) + // the artifacts are the source of the zip (without them, we can't prime either cache) for (const [artifactName, artifact] of Object.entries(window.__houdini__pending_artifacts__ ?? 
{})) { // save the value in the initial artifact cache @@ -233,13 +242,14 @@ export default { // if we also have data for the artifact, save it in the initial data cache if (window.__houdini__pending_data__?.[artifactName]) { const variables = window.__houdini__pending_variables__[artifactName] + if (artifact.hasComponents) { // we need to walk down the artifacts selection and instantiate any component fields injectComponents({ cache: window.__houdini__cache__, selection: artifact.selection, data: window.__houdini__pending_data__[artifactName], - variables: window.__houdini__pending_variables__[artifactName], + variables, }) } @@ -247,7 +257,7 @@ export default { const observer = window.__houdini__client__.observe({ artifact, cache: window.__houdini__cache__, - initialValue: window.__houdini__pending_data__[artifactName], + initialValue: window.__houdini__cache__.read({selection: artifact.selection, variables}).data, initialVariables: variables, }) @@ -271,12 +281,6 @@ export default { }) } - // hydrate the cache with the information from the initial payload - window.__houdini__cache__?.hydrate( - window.__houdini__initial__cache__, - window.__houdini__hydration__layer__ - ) - // get the initial url from the window const url = window.location.pathname diff --git a/packages/houdini-react/src/runtime/routing/Router.tsx b/packages/houdini-react/src/runtime/routing/Router.tsx index 7f15dc135..7ab18e807 100644 --- a/packages/houdini-react/src/runtime/routing/Router.tsx +++ b/packages/houdini-react/src/runtime/routing/Router.tsx @@ -3,6 +3,7 @@ import { DocumentStore, HoudiniClient } from '$houdini/runtime/client' import configFile from '$houdini/runtime/imports/config' import { deepEquals } from '$houdini/runtime/lib/deepEquals' import { LRUCache } from '$houdini/runtime/lib/lru' +import { marshalSelection, marshalInputs } from '$houdini/runtime/lib/scalars' import { GraphQLObject, GraphQLVariables } from '$houdini/runtime/lib/types' import { QueryArtifact } from '$houdini/runtime/lib/types' import { find_match } from '$houdini/runtime/router/match' @@ -197,7 +198,7 @@ function usePageData({ cacheParams: { disableSubscriptions: true }, session, }) - .then(() => { + .then(async () => { data_cache.set(id, observer) // if there is an error, we need to reject the promise @@ -215,7 +216,12 @@ function usePageData({ window.__houdini__cache__?.hydrate(${cache.serialize()}, window.__houdini__hydration__layer) const artifactName = "${artifact.name}" - const value = ${JSON.stringify(observer.state.data)} + const value = ${JSON.stringify( + await marshalSelection({ + selection: observer.artifact.selection, + data: observer.state.data, + }) + )} // if the data is pending, we need to resolve it if (window.__houdini__nav_caches__?.data_cache.has(artifactName)) { @@ -224,7 +230,18 @@ function usePageData({ const new_store = window.__houdini__client__.observe({ artifact: window.__houdini__nav_caches__.artifact_cache.get(artifactName), cache: window.__houdini__cache__, - initialValue: value, + }) + + // we're pushing this store onto the client, it should be initialized + window.__houdini__nav_caches__.data_cache.get(artifactName).send({ + setup: true, + variables: ${JSON.stringify( + marshalInputs({ + artifact: observer.artifact, + input: variables, + config: configFile, + }) + )} }) window.__houdini__nav_caches__?.data_cache.set(artifactName, new_store) @@ -244,12 +261,12 @@ function usePageData({ if (!window.__houdini__pending_artifacts__) { window.__houdini__pending_artifacts__ = {} } - - 
window.__houdini__pending_variables__[artifactName] = ${JSON.stringify(variables)} - window.__houdini__pending_data__[artifactName] = value - window.__houdini__pending_artifacts__[artifactName] = ${JSON.stringify(artifact)} } + window.__houdini__pending_variables__[artifactName] = ${JSON.stringify(observer.state.variables)} + window.__houdini__pending_data__[artifactName] = value + window.__houdini__pending_artifacts__[artifactName] = ${JSON.stringify(artifact)} + // if this payload finishes off an ssr request, we need to resolve the signal if (window.__houdini__nav_caches__?.ssr_signals.has(artifactName)) { @@ -258,7 +275,13 @@ function usePageData({ // we're pushing this store onto the client, it should be initialized window.__houdini__nav_caches__.data_cache.get(artifactName).send({ setup: true, - variables: ${JSON.stringify(variables)} + variables: ${JSON.stringify( + marshalInputs({ + artifact: observer.artifact, + input: variables, + config: configFile, + }) + )} }) } @@ -541,6 +564,7 @@ export function useSession(): [App.Session, (newSession: Partial) = body: JSON.stringify(newSession), headers: { 'Content-Type': 'application/json', + Accept: 'application/json', }, }) } diff --git a/packages/houdini/src/codegen/generators/typescript/types.ts b/packages/houdini/src/codegen/generators/typescript/types.ts index 87f43b802..f8d6000a5 100644 --- a/packages/houdini/src/codegen/generators/typescript/types.ts +++ b/packages/houdini/src/codegen/generators/typescript/types.ts @@ -53,16 +53,17 @@ export function scalarPropertyValue( let sourcePath = path.join(sourcePathParsed.dir, sourcePathParsed.name) // add the import - const localImport = ensureImports({ - config, - body, - import: '__component__' + component.fragment, - sourceModule: path.join( - path.relative(path.dirname(filepath), config.projectRoot), - 'src', - sourcePath - ), - }) + const localImport = + ensureImports({ + config, + body, + import: '__component__' + component.fragment, + sourceModule: path.join( + path.relative(path.dirname(filepath), config.projectRoot), + 'src', + sourcePath + ), + }) ?? 
'__component__' + component.fragment // build up the AST for the parameter type const parameters = AST.tsTypeReference(AST.identifier('Parameters')) diff --git a/packages/houdini/src/runtime/client/documentStore.test.ts b/packages/houdini/src/runtime/client/documentStore.test.ts index 9fc617923..a3a5e1011 100644 --- a/packages/houdini/src/runtime/client/documentStore.test.ts +++ b/packages/houdini/src/runtime/client/documentStore.test.ts @@ -1186,6 +1186,8 @@ export function createStore( date1: 'Date', date2: 'Date', }, + defaults: {}, + runtimeScalars: {}, }, pluginData: {}, }, @@ -1219,6 +1221,8 @@ export function createFragmentStore( date1: 'Date', date2: 'Date', }, + defaults: {}, + runtimeScalars: {}, }, pluginData: {}, }, diff --git a/packages/houdini/src/runtime/client/index.ts b/packages/houdini/src/runtime/client/index.ts index 566e49aeb..d3abd7d55 100644 --- a/packages/houdini/src/runtime/client/index.ts +++ b/packages/houdini/src/runtime/client/index.ts @@ -1,4 +1,5 @@ /// +import cacheRef from '../cache' import type { Cache } from '../cache/cache' import { getCurrentConfig, localApiEndpoint } from '../lib' import { flatten } from '../lib/flatten' @@ -45,12 +46,15 @@ export class HoudiniClient { // the URL of the api url: string - // the list of plugins for the client - readonly plugins: ClientPlugin[] - // expose operations settings readonly throwOnError_operations: ThrowOnErrorOperations[] + private cache: Cache | null = null + private throwOnError: ThrowOnErrorParams | undefined + private fetchParams: FetchParamFn | undefined + private pipeline: NestedList | undefined + private extraPlugins: NestedList | undefined + proxies: Record< string, (operation: { @@ -64,6 +68,11 @@ export class HoudiniClient { // this is modified by page entries when they load in order to register the components source componentCache: Record = {} + // we need the ability to link the client up with an external cache + setCache(cache: Cache) { + this.cache = cache + } + constructor({ url, fetchParams, @@ -80,26 +89,40 @@ export class HoudiniClient { this.throwOnError_operations = throwOnError?.operations ?? [] - // a few middlewares _have_ to run to setup the pipeline - this.plugins = flatten( + let serverPort = globalThis.process?.env?.HOUDINI_PORT ?? '5173' + + // if there is no url provided then assume we are using the internal local api + this.url = + url ?? + (globalThis.window ? '' : `https://localhost:${serverPort}`) + + localApiEndpoint(getCurrentConfig()) + + this.throwOnError = throwOnError + this.fetchParams = fetchParams + this.pipeline = pipeline + this.extraPlugins = plugins + } + + get plugins(): ClientPlugin[] { + return flatten( ([] as NestedList).concat( // if they specified a throw behavior - throwOnError ? [throwOnErrorPlugin(throwOnError)] : [], - fetchParamsPlugin(fetchParams), + this.throwOnError ? [throwOnErrorPlugin(this.throwOnError)] : [], + fetchParamsPlugin(this.fetchParams), // if the user wants to specify the entire pipeline, let them do so - pipeline ?? + this.pipeline ?? // the user doesn't have a specific pipeline so we should just add their desired plugins // to the standard set ( [ // make sure that documents always work - queryPlugin, - mutationPlugin, - fragmentPlugin, + queryPlugin(this.cache ?? cacheRef), + mutationPlugin(this.cache ?? cacheRef), + fragmentPlugin(this.cache ?? cacheRef), ] as NestedList ).concat( // add the specified middlewares - plugins ?? [], + this.extraPlugins ?? 
[], // and any middlewares we got from plugins pluginsFromPlugins, // if they provided a fetch function, use it as the body for the fetch middleware @@ -107,14 +130,6 @@ export class HoudiniClient { ) ) ) - - let serverPort = globalThis.process?.env?.HOUDINI_PORT ?? '5173' - - // if there is no url provided then assume we are using the internal local api - this.url = - url ?? - (globalThis.window ? '' : `https://localhost:${serverPort}`) + - localApiEndpoint(getCurrentConfig()) } observe<_Data extends GraphQLObject, _Input extends GraphQLVariables>({ @@ -127,6 +142,7 @@ export class HoudiniClient { plugins: createPluginHooks(this.plugins), fetching, enableCache, + cache: this.cache ?? undefined, ...rest, }) } diff --git a/packages/houdini/src/runtime/client/plugins/fragment.ts b/packages/houdini/src/runtime/client/plugins/fragment.ts index 6fbcd8aa8..acbeeef09 100644 --- a/packages/houdini/src/runtime/client/plugins/fragment.ts +++ b/packages/houdini/src/runtime/client/plugins/fragment.ts @@ -1,79 +1,79 @@ -import cache from '../../cache' +import type { Cache } from '../../cache/cache' import { deepEquals } from '../../lib/deepEquals' import { type SubscriptionSpec, ArtifactKind, DataSource } from '../../lib/types' -import type { ClientPlugin } from '../documentStore' import { documentPlugin } from '../utils' // the purpose of the fragment plugin is to provide fine-reactivity for cache updates // there are no network requests that get sent. send() always returns the initial value -export const fragment: ClientPlugin = documentPlugin(ArtifactKind.Fragment, function () { - // track the bits of state we need to hold onto - let subscriptionSpec: SubscriptionSpec | null = null +export const fragment = (cache: Cache) => + documentPlugin(ArtifactKind.Fragment, function () { + // track the bits of state we need to hold onto + let subscriptionSpec: SubscriptionSpec | null = null - // we need to track the last parents and variables used so we can re-subscribe - let lastReference: { parent: string; variables: any } | null = null + // we need to track the last parents and variables used so we can re-subscribe + let lastReference: { parent: string; variables: any } | null = null - return { - // establish the cache subscription - start(ctx, { next, resolve, variablesChanged, marshalVariables }) { - // if there's no parent id, there's nothing to do - if (!ctx.stuff.parentID) { - return next(ctx) - } - - // the object describing the current parent reference - const currentReference = { - parent: ctx.stuff.parentID, - variables: marshalVariables(ctx), - } + return { + // establish the cache subscription + start(ctx, { next, resolve, variablesChanged, marshalVariables }) { + // if there's no parent id, there's nothing to do + if (!ctx.stuff.parentID) { + return next(ctx) + } - // if the variables have changed we need to setup a new subscription with the cache - if ( - !ctx.cacheParams?.disableSubscriptions && - (!deepEquals(lastReference, currentReference) || variablesChanged(ctx)) - ) { - // if the variables changed we need to unsubscribe from the old fields and - // listen to the new ones - if (subscriptionSpec) { - cache.unsubscribe(subscriptionSpec, subscriptionSpec.variables?.() || {}) + // the object describing the current parent reference + const currentReference = { + parent: ctx.stuff.parentID, + variables: marshalVariables(ctx), } - // we need to subscribe with the marshaled variables - const variables = marshalVariables(ctx) + // if the variables have changed we need to setup a new subscription 
with the cache + if ( + !ctx.cacheParams?.disableSubscriptions && + (!deepEquals(lastReference, currentReference) || variablesChanged(ctx)) + ) { + // if the variables changed we need to unsubscribe from the old fields and + // listen to the new ones + if (subscriptionSpec) { + cache.unsubscribe(subscriptionSpec, subscriptionSpec.variables?.() || {}) + } - // save the new subscription spec - subscriptionSpec = { - rootType: ctx.artifact.rootType, - selection: ctx.artifact.selection, - variables: () => variables, - parentID: ctx.stuff.parentID, - set: (newValue) => { - resolve(ctx, { - data: newValue, - errors: null, - fetching: false, - partial: false, - stale: false, - source: DataSource.Cache, - variables, - }) - }, - } + // we need to subscribe with the marshaled variables + const variables = marshalVariables(ctx) - // make sure we subscribe to the new values - cache.subscribe(subscriptionSpec, variables) + // save the new subscription spec + subscriptionSpec = { + rootType: ctx.artifact.rootType, + selection: ctx.artifact.selection, + variables: () => variables, + parentID: ctx.stuff.parentID, + set: (newValue) => { + resolve(ctx, { + data: newValue, + errors: null, + fetching: false, + partial: false, + stale: false, + source: DataSource.Cache, + variables, + }) + }, + } - lastReference = currentReference - } + // make sure we subscribe to the new values + cache.subscribe(subscriptionSpec, variables) - // we're done - next(ctx) - }, + lastReference = currentReference + } + + // we're done + next(ctx) + }, - cleanup() { - if (subscriptionSpec) { - cache.unsubscribe(subscriptionSpec, subscriptionSpec.variables?.()) - } - }, - } -}) + cleanup() { + if (subscriptionSpec) { + cache.unsubscribe(subscriptionSpec, subscriptionSpec.variables?.()) + } + }, + } + }) diff --git a/packages/houdini/src/runtime/client/plugins/mutation.ts b/packages/houdini/src/runtime/client/plugins/mutation.ts index ce04880a7..0f50245f2 100644 --- a/packages/houdini/src/runtime/client/plugins/mutation.ts +++ b/packages/houdini/src/runtime/client/plugins/mutation.ts @@ -1,95 +1,96 @@ -import cache from '../../cache' +import type { Cache } from '../../cache/cache' import { marshalSelection } from '../../lib/scalars' import type { SubscriptionSpec } from '../../lib/types' import { ArtifactKind } from '../../lib/types' import { documentPlugin } from '../utils' -export const mutation = documentPlugin(ArtifactKind.Mutation, () => { - return { - async start(ctx, { next, marshalVariables }) { - // treat a mutation like it has an optimistic layer regardless of - // whether there actually _is_ one. This ensures that a query which fires - // after this mutation has been sent will overwrite any return values from the mutation - // - // as far as I can tell, this is an arbitrary decision but it does give a - // well-defined ordering to a subtle situation so that seems like a win - const layerOptimistic = cache._internal_unstable.storage.createLayer(true) +export const mutation = (cache: Cache) => + documentPlugin(ArtifactKind.Mutation, () => { + return { + async start(ctx, { next, marshalVariables }) { + // treat a mutation like it has an optimistic layer regardless of + // whether there actually _is_ one. 
This ensures that a query which fires + // after this mutation has been sent will overwrite any return values from the mutation + // + // as far as I can tell, this is an arbitrary decision but it does give a + // well-defined ordering to a subtle situation so that seems like a win + const layerOptimistic = cache._internal_unstable.storage.createLayer(true) - // the optimistic response gets passed in the context's stuff bag - const optimisticResponse = ctx.stuff.optimisticResponse + // the optimistic response gets passed in the context's stuff bag + const optimisticResponse = ctx.stuff.optimisticResponse - // if there is an optimistic response then we need to write the value immediately + // if there is an optimistic response then we need to write the value immediately - // hold onto the list of subscribers that we updated because of the optimistic response - // and make sure they are included in the final set of subscribers to notify - let toNotify: SubscriptionSpec[] = [] - if (optimisticResponse) { - toNotify = cache.write({ - selection: ctx.artifact.selection, - // make sure that any scalar values get processed into something we can cache - data: (await marshalSelection({ + // hold onto the list of subscribers that we updated because of the optimistic response + // and make sure they are included in the final set of subscribers to notify + let toNotify: SubscriptionSpec[] = [] + if (optimisticResponse) { + toNotify = cache.write({ selection: ctx.artifact.selection, - data: optimisticResponse, - }))!, - variables: marshalVariables(ctx), - layer: layerOptimistic.id, - }) - } - - // update cacheParams - ctx.cacheParams = { - ...ctx.cacheParams, - // write to the mutation's layer - layer: layerOptimistic, - // notify any subscribers that we updated with the optimistic response - // in order to address situations where the optimistic update was wrong - notifySubscribers: toNotify, - // make sure that we notify subscribers for any values that we compare - // in order to address any race conditions when comparing the previous value - forceNotify: true, - } + // make sure that any scalar values get processed into something we can cache + data: (await marshalSelection({ + selection: ctx.artifact.selection, + data: optimisticResponse, + }))!, + variables: marshalVariables(ctx), + layer: layerOptimistic.id, + }) + } - // make sure we write to the correct layer in the cache - next(ctx) - }, - afterNetwork(ctx, { resolve }) { - // before the cache sees the data, we need to clear the layer - if (ctx.cacheParams?.layer) { - cache.clearLayer(ctx.cacheParams.layer.id) - } + // update cacheParams + ctx.cacheParams = { + ...ctx.cacheParams, + // write to the mutation's layer + layer: layerOptimistic, + // notify any subscribers that we updated with the optimistic response + // in order to address situations where the optimistic update was wrong + notifySubscribers: toNotify, + // make sure that we notify subscribers for any values that we compare + // in order to address any race conditions when comparing the previous value + forceNotify: true, + } - // we're done - resolve(ctx) - }, - end(ctx, { resolve, value }) { - const hasErrors = value.errors && value.errors.length > 0 - // if there are errors, we need to clear the layer before resolving - if (hasErrors) { - // if the mutation failed, roll the layer back and delete it + // make sure we write to the correct layer in the cache + next(ctx) + }, + afterNetwork(ctx, { resolve }) { + // before the cache sees the data, we need to clear the layer if 
(ctx.cacheParams?.layer) { cache.clearLayer(ctx.cacheParams.layer.id) } - } - // merge the layer back into the cache - if (ctx.cacheParams?.layer) { - cache._internal_unstable.storage.resolveLayer(ctx.cacheParams.layer.id) - } + // we're done + resolve(ctx) + }, + end(ctx, { resolve, value }) { + const hasErrors = value.errors && value.errors.length > 0 + // if there are errors, we need to clear the layer before resolving + if (hasErrors) { + // if the mutation failed, roll the layer back and delete it + if (ctx.cacheParams?.layer) { + cache.clearLayer(ctx.cacheParams.layer.id) + } + } - // keep going - resolve(ctx) - }, - catch(ctx, { error }) { - // if there was an error, we need to clear the mutation - if (ctx.cacheParams?.layer) { - const { layer } = ctx.cacheParams + // merge the layer back into the cache + if (ctx.cacheParams?.layer) { + cache._internal_unstable.storage.resolveLayer(ctx.cacheParams.layer.id) + } + + // keep going + resolve(ctx) + }, + catch(ctx, { error }) { + // if there was an error, we need to clear the mutation + if (ctx.cacheParams?.layer) { + const { layer } = ctx.cacheParams - // if the mutation failed, roll the layer back and delete it - cache.clearLayer(layer.id) - cache._internal_unstable.storage.resolveLayer(layer.id) - } + // if the mutation failed, roll the layer back and delete it + cache.clearLayer(layer.id) + cache._internal_unstable.storage.resolveLayer(layer.id) + } - throw error - }, - } -}) + throw error + }, + } + }) diff --git a/packages/houdini/src/runtime/client/plugins/query.test.ts b/packages/houdini/src/runtime/client/plugins/query.test.ts index 6bbe09148..5201842b9 100644 --- a/packages/houdini/src/runtime/client/plugins/query.test.ts +++ b/packages/houdini/src/runtime/client/plugins/query.test.ts @@ -1,6 +1,7 @@ import { beforeEach, expect, test, vi } from 'vitest' import { testConfigFile } from '../../../test' +import { Cache } from '../../cache/cache' import { setMockConfig } from '../../lib/config' import { createStore, fakeFetch } from './cache.test' import { query } from './query' @@ -13,6 +14,8 @@ beforeEach(async () => { test('query plugin evaluates runtime scalars', async function () { const fetchSpy = vi.fn() + const cache = new Cache() + const store = createStore({ artifact: { kind: 'HoudiniQuery', @@ -63,7 +66,7 @@ test('query plugin evaluates runtime scalars', async function () { }, }, }, - pipeline: [query, fakeFetch({ spy: fetchSpy })], + pipeline: [query(cache), fakeFetch({ spy: fetchSpy })], }) // run the query with an artifact that contains runtime scalars diff --git a/packages/houdini/src/runtime/client/plugins/query.ts b/packages/houdini/src/runtime/client/plugins/query.ts index 381ef4f86..0ac965ba0 100644 --- a/packages/houdini/src/runtime/client/plugins/query.ts +++ b/packages/houdini/src/runtime/client/plugins/query.ts @@ -1,92 +1,92 @@ -import cache from '../../cache' +import type { Cache } from '../../cache/cache' import type { RuntimeScalarPayload } from '../../lib' import { type SubscriptionSpec, ArtifactKind, DataSource } from '../../lib/types' -import type { ClientPlugin } from '../documentStore' import { documentPlugin } from '../utils' -export const query: ClientPlugin = documentPlugin(ArtifactKind.Query, function () { - // track the bits of state we need to hold onto - let subscriptionSpec: SubscriptionSpec | null = null +export const query = (cache: Cache) => + documentPlugin(ArtifactKind.Query, function () { + // track the bits of state we need to hold onto + let subscriptionSpec: SubscriptionSpec | null = 
null - // remember the last variables we were called with - let lastVariables: Record | null = null + // remember the last variables we were called with + let lastVariables: Record | null = null - // the function to call when a query is sent - return { - start(ctx, { next }) { - const runtimeScalarPayload: RuntimeScalarPayload = { - session: ctx.session, - } + // the function to call when a query is sent + return { + start(ctx, { next }) { + const runtimeScalarPayload: RuntimeScalarPayload = { + session: ctx.session, + } + + // make sure to include the last variables as well as the new ones + ctx.variables = { + ...lastVariables, + // we need to evaluate any runtime scalars but allow the user to overwrite them + // by explicitly passing variables + ...Object.fromEntries( + Object.entries(ctx.artifact.input?.runtimeScalars ?? {}).map( + ([field, type]) => { + const runtimeScalar = ctx.config.features?.runtimeScalars?.[type] + // make typescript happy + if (!runtimeScalar) { + return [field, type] + } - // make sure to include the last variables as well as the new ones - ctx.variables = { - ...lastVariables, - // we need to evaluate any runtime scalars but allow the user to overwrite them - // by explicitly passing variables - ...Object.fromEntries( - Object.entries(ctx.artifact.input?.runtimeScalars ?? {}).map( - ([field, type]) => { - const runtimeScalar = ctx.config.features?.runtimeScalars?.[type] - // make typescript happy - if (!runtimeScalar) { - return [field, type] + // resolve the runtime scalar + return [field, runtimeScalar.resolve(runtimeScalarPayload)] } + ) + ), + ...ctx.variables, + } + next(ctx) + }, - // resolve the runtime scalar - return [field, runtimeScalar.resolve(runtimeScalarPayload)] - } - ) - ), - ...ctx.variables, - } - next(ctx) - }, + // patch subscriptions on the way out so that we don't get a cache update + // before the promise resolves + end(ctx, { resolve, marshalVariables, variablesChanged }) { + // if the variables have changed we need to setup a new subscription with the cache + if (variablesChanged(ctx) && !ctx.cacheParams?.disableSubscriptions) { + // if the variables changed we need to unsubscribe from the old fields and + // listen to the new ones + if (subscriptionSpec) { + cache.unsubscribe(subscriptionSpec, subscriptionSpec.variables?.() || {}) + } - // patch subscriptions on the way out so that we don't get a cache update - // before the promise resolves - end(ctx, { resolve, marshalVariables, variablesChanged }) { - // if the variables have changed we need to setup a new subscription with the cache - if (variablesChanged(ctx) && !ctx.cacheParams?.disableSubscriptions) { - // if the variables changed we need to unsubscribe from the old fields and - // listen to the new ones - if (subscriptionSpec) { - cache.unsubscribe(subscriptionSpec, subscriptionSpec.variables?.() || {}) - } + // track the new variables + lastVariables = { ...marshalVariables(ctx) } - // track the new variables - lastVariables = { ...marshalVariables(ctx) } + const variables = lastVariables + // save the new subscription spec + subscriptionSpec = { + rootType: ctx.artifact.rootType, + selection: ctx.artifact.selection, + variables: () => variables, + set: (newValue) => { + resolve(ctx, { + data: newValue, + errors: null, + fetching: false, + partial: false, + stale: false, + source: DataSource.Cache, + variables: ctx.variables ?? 
{}, + }) + }, + } - const variables = lastVariables - // save the new subscription spec - subscriptionSpec = { - rootType: ctx.artifact.rootType, - selection: ctx.artifact.selection, - variables: () => variables, - set: (newValue) => { - resolve(ctx, { - data: newValue, - errors: null, - fetching: false, - partial: false, - stale: false, - source: DataSource.Cache, - variables: ctx.variables ?? {}, - }) - }, + // make sure we subscribe to the new values + cache.subscribe(subscriptionSpec, lastVariables) } - // make sure we subscribe to the new values - cache.subscribe(subscriptionSpec, lastVariables) - } - - // we are done - resolve(ctx) - }, - cleanup() { - if (subscriptionSpec) { - cache.unsubscribe(subscriptionSpec, subscriptionSpec.variables?.()) - lastVariables = null - } - }, - } -}) + // we are done + resolve(ctx) + }, + cleanup() { + if (subscriptionSpec) { + cache.unsubscribe(subscriptionSpec, subscriptionSpec.variables?.()) + lastVariables = null + } + }, + } + }) diff --git a/packages/houdini/src/runtime/lib/scalars.ts b/packages/houdini/src/runtime/lib/scalars.ts index bf1a8744d..18dad9d34 100644 --- a/packages/houdini/src/runtime/lib/scalars.ts +++ b/packages/houdini/src/runtime/lib/scalars.ts @@ -1,12 +1,13 @@ import { getCurrentConfig } from './config' import type { ConfigFile } from './config' import { getFieldsForType } from './selection' -import type { - FragmentArtifact, - MutationArtifact, - QueryArtifact, - SubscriptionArtifact, - SubscriptionSelection, +import { + fragmentKey, + type FragmentArtifact, + type MutationArtifact, + type QueryArtifact, + type SubscriptionArtifact, + type SubscriptionSelection, } from './types' export async function marshalSelection({ @@ -34,6 +35,11 @@ export async function marshalSelection({ return Object.fromEntries( await Promise.all( Object.entries(data as {}).map(async ([fieldName, value]) => { + // leave the fragment entry alone + if (fieldName === fragmentKey) { + return [fieldName, value] + } + // look up the type for the field const { type, selection } = targetSelection[fieldName] // if we don't have type information for this field, just use it directly diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml index 3fa60743f..f12615edf 100644 --- a/pnpm-lock.yaml +++ b/pnpm-lock.yaml @@ -208,6 +208,9 @@ importers: cookie: specifier: ^0.5.0 version: 0.5.0 + e2e-api: + specifier: workspace:^ + version: link:../_api graphql-yoga: specifier: ^4.0.4 version: 4.0.4(graphql@15.5.0) @@ -251,9 +254,6 @@ importers: cross-env: specifier: ^7.0.3 version: 7.0.3 - e2e-api: - specifier: workspace:^ - version: link:../_api hono: specifier: ^3.6.0 version: 3.6.0 @@ -2303,7 +2303,7 @@ packages: resolution: {integrity: sha512-SKlIcMA71Dha5JnEWlw4XxcaJ+YupuXg0QCZgl2TOLFz4SkGCwU/geAsJvUJFwK2RbVLpQv/UMq67lOaBuwDtg==} engines: {node: '>=16.0.0'} peerDependencies: - graphql: ^14.0.0 || ^15.0.0 || ^16.0.0 || ^17.0.0 + graphql: 15.5.0 dependencies: '@graphql-tools/utils': 10.0.6(graphql@15.5.0) '@graphql-typed-document-node/core': 3.2.0(graphql@15.5.0) @@ -2316,7 +2316,7 @@ packages: /@graphql-tools/merge@8.3.14(graphql@15.5.0): resolution: {integrity: sha512-zV0MU1DnxJLIB0wpL4N3u21agEiYFsjm6DI130jqHpwF0pR9HkF+Ni65BNfts4zQelP0GjkHltG+opaozAJ1NA==} peerDependencies: - graphql: ^14.0.0 || ^15.0.0 || ^16.0.0 || ^17.0.0 + graphql: 15.5.0 dependencies: '@graphql-tools/utils': 9.1.3(graphql@15.5.0) graphql: 15.5.0 @@ -2327,7 +2327,7 @@ packages: resolution: {integrity: sha512-J7/xqjkGTTwOJmaJQJ2C+VDBDOWJL3lKrHJN4yMaRLAJH3PosB7GiPRaSDZdErs0+F77sH2MKs2haMMkywzx7Q==} engines: 
{node: '>=16.0.0'} peerDependencies: - graphql: ^14.0.0 || ^15.0.0 || ^16.0.0 || ^17.0.0 + graphql: 15.5.0 dependencies: '@graphql-tools/utils': 10.0.6(graphql@15.5.0) graphql: 15.5.0 @@ -2338,7 +2338,7 @@ packages: resolution: {integrity: sha512-kf3qOXMFcMs2f/S8Y3A8fm/2w+GaHAkfr3Gnhh2LOug/JgpY/ywgFVxO3jOeSpSEdoYcDKLcXVjMigNbY4AdQg==} engines: {node: '>=16.0.0'} peerDependencies: - graphql: ^14.0.0 || ^15.0.0 || ^16.0.0 || ^17.0.0 + graphql: 15.5.0 dependencies: '@graphql-tools/merge': 9.0.0(graphql@15.5.0) '@graphql-tools/utils': 10.0.6(graphql@15.5.0) @@ -2350,7 +2350,7 @@ packages: /@graphql-tools/schema@9.0.12(graphql@15.5.0): resolution: {integrity: sha512-DmezcEltQai0V1y96nwm0Kg11FDS/INEFekD4nnVgzBqawvznWqK6D6bujn+cw6kivoIr3Uq//QmU/hBlBzUlQ==} peerDependencies: - graphql: ^14.0.0 || ^15.0.0 || ^16.0.0 || ^17.0.0 + graphql: 15.5.0 dependencies: '@graphql-tools/merge': 8.3.14(graphql@15.5.0) '@graphql-tools/utils': 9.1.3(graphql@15.5.0) @@ -2363,7 +2363,7 @@ packages: resolution: {integrity: sha512-hZMjl/BbX10iagovakgf3IiqArx8TPsotq5pwBld37uIX1JiZoSbgbCIFol7u55bh32o6cfDEiiJgfAD5fbeyQ==} engines: {node: '>=16.0.0'} peerDependencies: - graphql: ^14.0.0 || ^15.0.0 || ^16.0.0 || ^17.0.0 + graphql: 15.5.0 dependencies: '@graphql-typed-document-node/core': 3.2.0(graphql@15.5.0) dset: 3.1.2 @@ -2374,7 +2374,7 @@ packages: /@graphql-tools/utils@9.1.3(graphql@15.5.0): resolution: {integrity: sha512-bbJyKhs6awp1/OmP+WKA1GOyu9UbgZGkhIj5srmiMGLHohEOKMjW784Sk0BZil1w2x95UPu0WHw6/d/HVCACCg==} peerDependencies: - graphql: ^14.0.0 || ^15.0.0 || ^16.0.0 || ^17.0.0 + graphql: 15.5.0 dependencies: graphql: 15.5.0 tslib: 2.6.2 @@ -2383,7 +2383,7 @@ packages: /@graphql-tools/utils@9.2.1(graphql@15.5.0): resolution: {integrity: sha512-WUw506Ql6xzmOORlriNrD6Ugx+HjVgYxt9KCXD9mHAak+eaXSwuGGPyE60hy9xaDEoXKBsG7SkG69ybitaVl6A==} peerDependencies: - graphql: ^14.0.0 || ^15.0.0 || ^16.0.0 || ^17.0.0 + graphql: 15.5.0 dependencies: '@graphql-typed-document-node/core': 3.2.0(graphql@15.5.0) graphql: 15.5.0 @@ -2393,7 +2393,7 @@ packages: /@graphql-typed-document-node/core@3.2.0(graphql@15.5.0): resolution: {integrity: sha512-mB9oAsNCm9aM3/SOv4YtBMqZbYj10R7dkq8byBqxGY/ncFwhf2oQzMV+LCRlWoDSEBJ3COiR1yeDvMtsoOsuFQ==} peerDependencies: - graphql: ^0.8.0 || ^0.9.0 || ^0.10.0 || ^0.11.0 || ^0.12.0 || ^0.13.0 || ^14.0.0 || ^15.0.0 || ^16.0.0 || ^17.0.0 + graphql: 15.5.0 dependencies: graphql: 15.5.0 dev: false @@ -2409,7 +2409,7 @@ packages: resolution: {integrity: sha512-pjA7xLIYVCugxyM/FwG7SlbPVPxn8cr5AFT0I4EsgwK2z6D0oM+fN6guPam7twTVKr0BmA/EtVm2RWkik114Mw==} peerDependencies: '@graphql-tools/utils': ^9.2.1 - graphql: ^15.2.0 || ^16.0.0 + graphql: 15.5.0 graphql-yoga: ^3.9.1 dependencies: '@graphql-tools/utils': 9.2.1(graphql@15.5.0) @@ -2770,7 +2770,7 @@ packages: /@pothos/core@3.38.0(graphql@15.5.0): resolution: {integrity: sha512-2jlnvkrCmbrHxK269745TXxl185LwJtC5oMz4nbFP40LmVV9zbDV3WKqbG7D+3rg9hvxBe0RmmwWrOjNcGpICA==} peerDependencies: - graphql: '>=15.1.0' + graphql: 15.5.0 dependencies: graphql: 15.5.0 dev: false @@ -2779,7 +2779,7 @@ packages: resolution: {integrity: sha512-7eOsgKL2qCQ+/hHpye4sNNbR2mYogDXPWpQboqniYvUdOjez3VDYjAuOn8Ntw73kZQF0N1iloZPikhCxvY7xuw==} peerDependencies: '@pothos/core': '*' - graphql: '>=15.1.0' + graphql: 15.5.0 dependencies: '@pothos/core': 3.38.0(graphql@15.5.0) graphql: 15.5.0 @@ -2789,7 +2789,7 @@ packages: resolution: {integrity: sha512-CgZJLaHLt1Q30j+XCiWV6qVJcae1ksiUFdi8kz4sEsOhCNfdVwz64s8rx7SSqsdmPbjb68dJIDKpq2Qg9VZb8g==} peerDependencies: '@pothos/core': '*' - graphql: '>=15.1.0' + 
graphql: 15.5.0 dependencies: '@pothos/core': 3.38.0(graphql@15.5.0) graphql: 15.5.0 @@ -7048,7 +7048,7 @@ packages: resolution: {integrity: sha512-44yBuw2/DLNEiMypbNZBt1yMDbBmyVPVesPywnteGGALiBmdyy1JP8jSg8ClLePg8ZZxk0O4BLhd1a6U/1jDOQ==} engines: {node: ^12.20.0 || ^14.15.0 || >= 15.9.0} peerDependencies: - graphql: ^16.2.0 + graphql: 15.5.0 dependencies: graphql: 15.5.0 dev: false @@ -7057,7 +7057,7 @@ packages: resolution: {integrity: sha512-4EiZ3/UXYcjm+xFGP544/yW1+DVI8ZpKASFbzrV5EDTFWJp0ZvLl4Dy2fSZAzz9imKp5pZMIcjB0x/H69Pv/6w==} engines: {node: '>=10'} peerDependencies: - graphql: '>=0.11 <=16' + graphql: 15.5.0 dependencies: graphql: 15.5.0 dev: false @@ -7066,7 +7066,7 @@ packages: resolution: {integrity: sha512-MvCLhFecYNIKuxAZisPjpIL9lxRYbpgPSNKENDO/8CV3oiFlsLJHZb5dp2sVAeLafXHeZ9TgkijLthUBc1+Jag==} engines: {node: '>=16.0.0'} peerDependencies: - graphql: ^15.2.0 || ^16.0.0 + graphql: 15.5.0 dependencies: '@envelop/core': 4.0.1 '@graphql-tools/executor': 1.2.0(graphql@15.5.0)
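
The fix above is easiest to follow with the DateTime scalar the e2e app configures: custom scalar values (such as Date objects) only exist on the client, so data embedded in the SSR payload has to be marshaled back into its wire format before it is serialized, and unmarshaled again when the cache is hydrated in the browser. The sketch below is illustrative only — the marshal/unmarshal bodies are assumed from Houdini's documented custom-scalar config (only the opening lines of the scalars block appear in the houdini.config.js hunk), and serializePayload is a hypothetical name for work the Router.tsx change does inline.

import { marshalInputs, marshalSelection } from '$houdini/runtime/lib/scalars'
import configFile from '$houdini/runtime/imports/config'

// Assumed shape of the e2e app's DateTime scalar (only the first lines of this block
// are visible in the houdini.config.js hunk above); type/marshal/unmarshal follow
// Houdini's documented custom-scalar config:
const scalars = {
	DateTime: {
		type: 'Date',
		marshal: (val: Date) => val.getTime(), // Date -> number on the way out
		unmarshal: (val: number) => new Date(val), // number -> Date on the way back in
	},
}

// Hypothetical helper (the patch does this inline in Router.tsx) showing what now
// happens before the data is JSON.stringify'd into the SSR script tag. Previously the
// raw observer.state.data was embedded, so a Date became a plain ISO string that the
// client never ran through unmarshal; marshaling first keeps the embedded payload in
// the same wire format as a normal network response.
async function serializePayload(observer: any, variables: any) {
	// put client-side values (e.g. Date objects) back into their wire format...
	const value = await marshalSelection({
		selection: observer.artifact.selection,
		data: observer.state.data,
	})
	// ...and marshal the accompanying variables the same way
	const inputs = marshalInputs({
		artifact: observer.artifact,
		input: variables,
		config: configFile,
	})
	return { value, inputs }
}

The companion change in runtime/client/index.ts follows the same theme: the query, mutation, and fragment plugins are now factories that receive a Cache instance (falling back to the global cache), and the client exposes setCache() so the browser entry point in vite.tsx can link the hydrated cache before any observers read their initial values from it.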