15 changes: 15 additions & 0 deletions alchemy-web/src/content/docs/providers/cloudflare/d1-database.md
@@ -60,6 +60,21 @@ const db = await D1Database("my-db", {
});
```

## Importing Data

Create a database and import data from a directory of SQL files.

```ts
import { D1Database } from "alchemy/cloudflare";

const db = await D1Database("my-db", {
name: "my-db",
importDir: "./imports",
});
```

After any migrations are applied, the import files are run using [Cloudflare's D1 import API](https://developers.cloudflare.com/d1/best-practices/import-export-data/).
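
Imports can be combined with migrations. The sketch below (using the resource's existing `migrationsDir` option; directory paths are illustrative) shows both together: migrations are applied first, then the files in `importDir` are uploaded through the import API.

```ts
import { D1Database } from "alchemy/cloudflare";

// Migrations in ./migrations are applied first; the .sql files in
// ./imports are then uploaded through Cloudflare's D1 import API.
const db = await D1Database("my-db", {
  name: "my-db",
  migrationsDir: "./migrations",
  importDir: "./imports",
});
```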

## With Location Hint

Create a database with a specific location hint for optimal performance.
53 changes: 36 additions & 17 deletions alchemy/src/cloudflare/d1-database.ts
@@ -10,8 +10,10 @@ import {
} from "./api.ts";
import { withJurisdiction } from "./bucket.ts";
import { cloneD1Database } from "./d1-clone.ts";
import { importD1Database } from "./d1-import.ts";
import { applyLocalD1Migrations } from "./d1-local-migrations.ts";
import { applyMigrations, listMigrationsFiles } from "./d1-migrations.ts";
import { applyMigrations } from "./d1-migrations.ts";
import { listSqlFiles, type D1SqlFile } from "./d1-sql-file.ts";
import { deleteMiniflareBinding } from "./miniflare/delete.ts";

const DEFAULT_MIGRATIONS_TABLE = "d1_migrations";
@@ -85,11 +87,10 @@ export interface D1DatabaseProps extends CloudflareApiOptions {
clone?: D1Database | { id: string } | { name: string };

/**
* These files will be generated internally with the D1Database wrapper function when migrationsDir is specified
*
* @private
* Directory containing import SQL files.
* After migrations are applied, these files will be run using [Cloudflare's D1 import API](https://developers.cloudflare.com/d1/best-practices/import-export-data/).
*/
migrationsFiles?: Array<{ id: string; sql: string }>;
importDir?: string;

/**
* Name of the table used to track migrations. Only used if migrationsDir is specified. Defaults to 'd1_migrations'
@@ -102,6 +103,7 @@ export interface D1DatabaseProps extends CloudflareApiOptions {
* This is analogous to wrangler's `migrations_dir`.
*/
migrationsDir?: string;

/**
* Whether to emulate the database locally when Alchemy is running in watch mode.
*/
@@ -139,6 +141,7 @@ export type D1Database = Pick<
| "migrationsTable"
| "primaryLocationHint"
| "readReplication"
| "importDir"
> & {
type: "d1";
/**
@@ -251,15 +254,17 @@
*/
export async function D1Database(
id: string,
props: Omit<D1DatabaseProps, "migrationsFiles"> = {},
props: Omit<D1DatabaseProps, "migrationsFiles" | "importFiles"> = {},
): Promise<D1Database> {
const migrationsFiles = props.migrationsDir
? await listMigrationsFiles(props.migrationsDir)
: [];
const [migrationsFiles, importFiles] = await Promise.all([
props.migrationsDir ? await listSqlFiles(props.migrationsDir) : [],
props.importDir ? await listSqlFiles(props.importDir) : [],
]);

return _D1Database(id, {
...props,
migrationsFiles,
importFiles,
dev: {
...(props.dev ?? {}),
// force local migrations to run even if the database was already deployed live
@@ -274,7 +279,10 @@ const _D1Database = Resource(
async function (
this: Context<D1Database>,
id: string,
props: D1DatabaseProps,
props: D1DatabaseProps & {
migrationsFiles: Array<D1SqlFile>;
importFiles: Array<D1SqlFile>;
},
): Promise<D1Database> {
const databaseName =
props.name ?? this.output?.name ?? this.scope.createPhysicalName(id);
@@ -292,11 +300,12 @@ const _D1Database = Resource(
const adopt = props.adopt ?? this.scope.adopt;

if (local) {
if (props.migrationsFiles && props.migrationsFiles.length > 0) {
if (props.migrationsFiles?.length || props.importFiles?.length) {
await applyLocalD1Migrations({
databaseId: dev.id,
migrationsTable: props.migrationsTable ?? DEFAULT_MIGRATIONS_TABLE,
migrations: props.migrationsFiles,
migrations: props.migrationsFiles ?? [],
imports: props.importFiles ?? [],
rootDir: this.scope.rootDir,
});
}
@@ -308,6 +317,7 @@ const _D1Database = Resource(
primaryLocationHint: props.primaryLocationHint,
migrationsDir: props.migrationsDir,
migrationsTable: props.migrationsTable ?? DEFAULT_MIGRATIONS_TABLE,
importDir: props.importDir,
dev,
jurisdiction,
};
@@ -401,12 +411,13 @@ const _D1Database = Resource(
dbData = await createDatabase(api, databaseName, props);
}

const databaseId = dbData.result.uuid!;

// Run migrations if provided
if (props.migrationsFiles && props.migrationsFiles.length > 0) {
try {
const migrationsTable =
props.migrationsTable || DEFAULT_MIGRATIONS_TABLE;
const databaseId = dbData.result.uuid || this.output?.id;

if (!databaseId) {
throw new Error("Database ID not found for migrations");
Expand All @@ -424,19 +435,27 @@ const _D1Database = Resource(
throw migrationErr;
}
}
if (!dbData.result.uuid) {
// TODO(sam): why would this ever happen?
throw new Error("Database ID not found");
if (props.importFiles?.length) {
await Promise.all(
props.importFiles.map(async (file) => {
await importD1Database(api, {
databaseId,
sqlData: file.sql,
filename: file.id,
});
}),
);
}
return {
type: "d1",
id: dbData.result.uuid!,
id: databaseId,
name: databaseName,
readReplication: props.readReplication,
primaryLocationHint: props.primaryLocationHint,
dev,
migrationsDir: props.migrationsDir,
migrationsTable: props.migrationsTable ?? DEFAULT_MIGRATIONS_TABLE,
importDir: props.importDir,
jurisdiction,
};
},
45 changes: 33 additions & 12 deletions alchemy/src/cloudflare/d1-local-migrations.ts
@@ -1,11 +1,13 @@
import * as mf from "miniflare";
import type { D1SqlFile } from "./d1-sql-file.ts";
import { getDefaultPersistPath } from "./miniflare/paths.ts";

export interface D1LocalMigrationOptions {
rootDir: string;
databaseId: string;
migrationsTable: string;
migrations: { id: string; sql: string }[];
migrations: Array<D1SqlFile>;
imports: Array<D1SqlFile>;
}

export const applyLocalD1Migrations = async (
@@ -21,9 +23,8 @@ export const applyLocalD1Migrations = async (
});
try {
await miniflare.ready;
// TODO(sam): don't use `any` once prisma is fixed upstream
const db: any = await miniflare.getD1Database("DB");
const session: any = db.withSession("first-primary");
const db = await miniflare.getD1Database("DB");
const session = db.withSession("first-primary");
await session
.prepare(
`CREATE TABLE IF NOT EXISTS ${options.migrationsTable} (
@@ -33,22 +34,42 @@
)`,
)
.run();
const appliedMigrations: {
results: { name: string }[];
await session
.prepare(
`ALTER TABLE ${options.migrationsTable} ADD COLUMN type TEXT NOT NULL DEFAULT 'migration';`,
)
.run()
// SQLite lacks ADD COLUMN IF NOT EXISTS, so tolerate re-runs against persisted local state.
.catch((error) => {
if (!String(error).includes("duplicate column name")) throw error;
});
const applied: {
results: { name: string; type: "migration" | "import" }[];
} = await session
.prepare(
`SELECT name FROM ${options.migrationsTable} ORDER BY applied_at ASC`,
`SELECT name, type FROM ${options.migrationsTable} ORDER BY applied_at ASC`,
)
.all();
const insertRecord = session.prepare(
`INSERT INTO ${options.migrationsTable} (name) VALUES (?)`,
`INSERT INTO ${options.migrationsTable} (name, type) VALUES (?, ?)`,
);
for (const migration of options.migrations) {
if (appliedMigrations.results.some((m) => m.name === migration.id)) {
for (const { id, sql } of options.migrations) {
if (applied.results.some((m) => m.name === id)) {
continue;
}
await session.prepare(sql).run();
await insertRecord.bind(id, "migration").run();
}
for (const { id, sql, hash } of options.imports) {
const name = `${id}-${hash}`;
if (applied.results.some((m) => m.name === name)) {
continue;
}
await session.prepare(migration.sql).run();
await insertRecord.bind(migration.id).run();
await session.batch(
// Split into statements to prevent D1_ERROR: statement too long: SQLITE_TOOBIG.
// This is split naively by semicolons followed by newlines - not perfect but should work 99% of the time.
sql
.split(/;\r?\n/)
.filter((s) => s.trim())
.map((s) => session.prepare(s)),
);
await insertRecord.bind(name, "import").run();
}
} finally {
await miniflare.dispose();
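
An aside on the statement split above: a minimal, standalone illustration (not part of the changed files) of how splitting on a semicolon followed by a newline behaves. A semicolon inside a string literal on a single line survives, but a literal that itself spans a `;` plus newline would still be split incorrectly, which is the "not perfect" caveat in the comment.

```ts
// Naive split on ";" followed by a newline, matching the local import path above.
const sql = "INSERT INTO t (v) VALUES ('a;b');\nCREATE TABLE u (id INTEGER);\n";

const statements = sql
  .split(/;\r?\n/)
  .filter((s) => s.trim());

console.log(statements);
// ["INSERT INTO t (v) VALUES ('a;b')", "CREATE TABLE u (id INTEGER)"]
```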
43 changes: 0 additions & 43 deletions alchemy/src/cloudflare/d1-migrations.ts
@@ -1,6 +1,3 @@
import { glob } from "glob";
import * as fs from "node:fs/promises";
import path from "pathe";
import { logger } from "../util/logger.ts";
import { handleApiError } from "./api-error.ts";
import type { CloudflareApi } from "./api.ts";
@@ -14,16 +11,6 @@ export interface D1MigrationOptions {
quiet?: boolean;
}

const getPrefix = (name: string) => {
const prefix = name.split("_")[0];
const num = Number.parseInt(prefix, 10);
return Number.isNaN(num) ? null : num;
};

async function readMigrationFile(filePath: string): Promise<string> {
return fs.readFile(filePath, "utf-8");
}

/**
* Detects the current schema of the migration table.
* Returns info about the table structure to determine if migration is needed.
@@ -153,36 +140,6 @@ async function migrateLegacySchema(
}
}

/**
* Reads migration SQL files from the migrationsDir, sorted by filename.
* @param migrationsDir Directory containing .sql migration files
*/
export async function listMigrationsFiles(
migrationsDir: string,
): Promise<Array<{ id: string; sql: string }>> {
const entries = await glob("**/*.sql", {
cwd: migrationsDir,
});

const sqlFiles = entries.sort((a: string, b: string) => {
const aNum = getPrefix(a);
const bNum = getPrefix(b);

if (aNum !== null && bNum !== null) return aNum - bNum;
if (aNum !== null) return -1;
if (bNum !== null) return 1;

return a.localeCompare(b);
});

return await Promise.all(
sqlFiles.map(async (file) => ({
id: file,
sql: await readMigrationFile(path.join(migrationsDir, file)),
})),
);
}

/**
* Ensures the migrations table exists in the D1 database with wrangler-compatible schema.
* Handles migration from legacy 2-column schema to 3-column schema if needed.
49 changes: 49 additions & 0 deletions alchemy/src/cloudflare/d1-sql-file.ts
@@ -0,0 +1,49 @@
import crypto from "node:crypto";
import fs from "node:fs/promises";
import path from "pathe";

export interface D1SqlFile {
id: string;
sql: string;
hash: string;
}

/**
* Lists SQL files from the directory, sorted by filename.
* @param directory Directory containing .sql files
*/
export async function listSqlFiles(directory: string): Promise<D1SqlFile[]> {
const files = await Array.fromAsync(
fs.glob("**/*.sql", {
cwd: directory,
}),
);

const sortedFiles = files.sort((a: string, b: string) => {
const aNum = getPrefix(a);
const bNum = getPrefix(b);

if (aNum !== null && bNum !== null) return aNum - bNum;
if (aNum !== null) return -1;
if (bNum !== null) return 1;

return a.localeCompare(b);
});

return Promise.all(
sortedFiles.map(async (id) => {
const sql = await fs.readFile(path.join(directory, id), "utf-8");
const hash = crypto.createHash("sha256").update(sql).digest("hex");
const file: D1SqlFile = { id, sql, hash };
// Make the sql property non-enumerable so it's not included in state. This prevents state store errors caused by large sql files.
Object.defineProperty(file, "sql", { enumerable: false });
return file;
}),
);
}

const getPrefix = (name: string) => {
const prefix = name.split("_")[0];
const num = Number.parseInt(prefix, 10);
return Number.isNaN(num) ? null : num;
};
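
A usage sketch of `listSqlFiles` (the directory and its contents are hypothetical), showing the sort order the helper produces and the effect of the non-enumerable `sql` property:

```ts
import { listSqlFiles } from "./d1-sql-file.ts";

// Suppose ./imports contains 0002_users.sql, 0001_seed.sql and notes.sql.
const files = await listSqlFiles("./imports");

// Files with a numeric prefix sort first by that number; the rest fall back to localeCompare.
console.log(files.map((f) => f.id));
// ["0001_seed.sql", "0002_users.sql", "notes.sql"]

// `sql` is non-enumerable, so serialized state omits the (potentially large) file contents.
console.log(JSON.stringify(files[0]));
// {"id":"0001_seed.sql","hash":"..."}
```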
7 changes: 3 additions & 4 deletions alchemy/src/state/d1-state-store.ts
@@ -78,12 +78,11 @@ const upsertDatabase = async (api: CloudflareApi, databaseName: string) => {
const { listDatabases, createDatabase } = await import(
"../cloudflare/d1-database.ts"
);
const { applyMigrations, listMigrationsFiles } = await import(
"../cloudflare/d1-migrations.ts"
);
const { listSqlFiles } = await import("../cloudflare/d1-sql-file.ts");
const { applyMigrations } = await import("../cloudflare/d1-migrations.ts");
const migrate = async (databaseId: string) => {
await applyMigrations({
migrationsFiles: await listMigrationsFiles(MIGRATIONS_DIRECTORY),
migrationsFiles: await listSqlFiles(MIGRATIONS_DIRECTORY),
migrationsTable: "migrations",
accountId: api.accountId,
databaseId,
3 changes: 2 additions & 1 deletion examples/cloudflare-worker-simple/.gitignore
@@ -1 +1,2 @@
.alchemy/
.alchemy/
imports/