diff --git a/src/dialect/dialect-adapter-base.ts b/src/dialect/dialect-adapter-base.ts
index fab6512aa..f87f709fc 100644
--- a/src/dialect/dialect-adapter-base.ts
+++ b/src/dialect/dialect-adapter-base.ts
@@ -24,6 +24,10 @@ export abstract class DialectAdapterBase implements DialectAdapter {
     return false
   }
 
+  get supportsBatch(): boolean {
+    return false
+  }
+
   abstract acquireMigrationLock(
     db: Kysely<any>,
     options: MigrationLockOptions,
diff --git a/src/dialect/dialect-adapter.ts b/src/dialect/dialect-adapter.ts
index f8bf61fe1..e214983e1 100644
--- a/src/dialect/dialect-adapter.ts
+++ b/src/dialect/dialect-adapter.ts
@@ -37,6 +37,14 @@ export interface DialectAdapter {
    */
   readonly supportsOutput?: boolean
 
+  /**
+   * Whether or not this dialect supports batched query execution.
+   *
+   * When true, multiple queries can be executed more efficiently by reducing
+   * network round trips. The actual batching mechanism is dialect-specific.
+   */
+  readonly supportsBatch: boolean
+
   /**
    * This method is used to acquire a lock for the migrations so that
    * it's not possible for two migration operations to run in parallel.
diff --git a/src/dialect/postgres/postgres-adapter.ts b/src/dialect/postgres/postgres-adapter.ts
index 6258617f3..459635593 100644
--- a/src/dialect/postgres/postgres-adapter.ts
+++ b/src/dialect/postgres/postgres-adapter.ts
@@ -15,6 +15,10 @@ export class PostgresAdapter extends DialectAdapterBase {
     return true
   }
 
+  override get supportsBatch(): boolean {
+    return true
+  }
+
   override async acquireMigrationLock(
     db: Kysely<any>,
     _opt: MigrationLockOptions,
diff --git a/src/dialect/postgres/postgres-driver.ts b/src/dialect/postgres/postgres-driver.ts
index 4a21b54b6..36e1cc008 100644
--- a/src/dialect/postgres/postgres-driver.ts
+++ b/src/dialect/postgres/postgres-driver.ts
@@ -212,6 +212,30 @@ class PostgresConnection implements DatabaseConnection {
     }
   }
 
+  async executeBatch(
+    compiledQueries: ReadonlyArray<CompiledQuery>,
+  ): Promise<QueryResult<unknown>[]> {
+    // FIXME: This does not actually use Postgres's pipeline mode, as it's not supported by node-postgres.
+    const results: QueryResult<unknown>[] = []
+
+    try {
+      await this.executeQuery(CompiledQuery.raw('begin'))
+
+      for (const compiledQuery of compiledQueries) {
+        const result = await this.executeQuery(compiledQuery)
+        results.push(result)
+      }
+
+      await this.executeQuery(CompiledQuery.raw('commit'))
+
+      return results
+    } catch (error) {
+      await this.executeQuery(CompiledQuery.raw('rollback'))
+
+      throw error
+    }
+  }
+
   [PRIVATE_RELEASE_METHOD](): void {
     this.#client.release()
   }
diff --git a/src/driver/database-connection.ts b/src/driver/database-connection.ts
index 6d9db8d33..7329eb473 100644
--- a/src/driver/database-connection.ts
+++ b/src/driver/database-connection.ts
@@ -11,6 +11,16 @@ export interface DatabaseConnection {
     compiledQuery: CompiledQuery,
     chunkSize?: number,
   ): AsyncIterableIterator<QueryResult<R>>
+
+  /**
+   * Executes multiple queries in a batch.
+   *
+   * This is optional and only implemented by dialects that support batching.
+   * When not implemented, queries will be executed sequentially.
+   */
+  executeBatch?(
+    compiledQueries: ReadonlyArray<CompiledQuery>,
+  ): Promise<QueryResult<unknown>[]>
 }
 
 export interface QueryResult<O> {
diff --git a/src/index.ts b/src/index.ts
index 193c72c5f..12853b701 100644
--- a/src/index.ts
+++ b/src/index.ts
@@ -30,6 +30,7 @@ export * from './query-builder/json-path-builder.js'
 export * from './query-builder/merge-query-builder.js'
 export * from './query-builder/merge-result.js'
 export * from './query-builder/order-by-item-builder.js'
+export * from './query-builder/batch-builder.js'
 export * from './raw-builder/raw-builder.js'
 export * from './raw-builder/sql.js'
diff --git a/src/kysely.ts b/src/kysely.ts
index 18bda80d0..023a7d9c0 100644
--- a/src/kysely.ts
+++ b/src/kysely.ts
@@ -49,6 +49,7 @@ import {
 } from './util/provide-controlled-connection.js'
 import { ConnectionProvider } from './driver/connection-provider.js'
 import { logOnce } from './util/log-once.js'
+import { BatchBuilder } from './query-builder/batch-builder.js'
 
 declare global {
   interface AsyncDisposable {}
@@ -561,6 +562,47 @@ export class Kysely<DB>
     return this.getExecutor().executeQuery(compiledQuery)
   }
 
+  /**
+   * Creates a batch builder for executing multiple queries efficiently.
+   *
+   * Batching queries can reduce network round trips when executing multiple
+   * independent queries. Whether batching provides performance benefits depends
+   * on the dialect - check {@link DialectAdapter.supportsBatch} to see if your
+   * dialect supports optimized batching.
+   *
+   * ### Examples
+   *
+   * Execute multiple queries and get type-safe results:
+   *
+   * ```ts
+   * const [persons, pets] = await db
+   *   .batch()
+   *   .add(db.selectFrom('person').selectAll())
+   *   .add(db.selectFrom('pet').selectAll())
+   *   .execute()
+   *
+   * // persons is Person[]
+   * // pets is Pet[]
+   * ```
+   *
+   * Mix different query types:
+   *
+   * ```ts
+   * const results = await db
+   *   .batch()
+   *   .add(db.selectFrom('person').selectAll().where('id', '=', 1))
+   *   .add(db.updateTable('person').set({ active: true }).where('id', '=', 1))
+   *   .add(db.deleteFrom('pet').where('id', '=', 123))
+   *   .execute()
+   * ```
+   */
+  batch(): BatchBuilder<[]> {
+    if (!this.#props.executor.adapter.supportsBatch) {
+      throw new Error('batch execution is not supported by this dialect')
+    }
+    return new BatchBuilder<[]>({ executor: this.#props.executor })
+  }
+
   async [Symbol.asyncDispose]() {
     await this.destroy()
   }
@@ -593,6 +635,12 @@ export class Transaction<DB> extends Kysely<DB> {
     )
   }
 
+  override batch(): BatchBuilder<[]> {
+    throw new Error(
+      'calling the batch method for a Transaction is not supported',
+    )
+  }
+
   override async destroy(): Promise<never> {
     throw new Error(
       'calling the destroy method for a Transaction is not supported',
     )
   }
@@ -1204,6 +1252,13 @@ class NotCommittedOrRolledBackAssertingExecutor implements QueryExecutor {
     return this.#executor.stream(compiledQuery, chunkSize)
   }
 
+  executeBatch<R>(
+    compiledQueries: ReadonlyArray<CompiledQuery<unknown>>,
+  ): Promise<QueryResult<R>[]> {
+    assertNotCommittedOrRolledBack(this.#state)
+    return this.#executor.executeBatch(compiledQueries)
+  }
+
   withConnectionProvider(
     connectionProvider: ConnectionProvider,
   ): QueryExecutor {
diff --git a/src/query-builder/batch-builder.ts b/src/query-builder/batch-builder.ts
new file mode 100644
index 000000000..99c9a0ba9
--- /dev/null
+++ b/src/query-builder/batch-builder.ts
@@ -0,0 +1,102 @@
+import { CompiledQuery } from '../query-compiler/compiled-query.js'
+import { Compilable, isCompilable } from '../util/compilable.js'
+import { QueryExecutor } from '../query-executor/query-executor.js'
+import { freeze } from '../util/object-utils.js'
+
+export interface BatchBuilderProps {
+  readonly executor: QueryExecutor
+}
+
+/**
+ * A builder for executing multiple queries in a batch.
+ *
+ * Batching queries can reduce network round trips and improve performance
+ * when executing multiple independent queries.
+ *
+ * ### Examples
+ *
+ * Execute multiple queries in a batch:
+ *
+ * ```ts
+ * const results = await db
+ *   .batch()
+ *   .add(db.selectFrom('person').selectAll().where('id', '=', 1))
+ *   .add(db.selectFrom('pet').selectAll().where('owner_id', '=', 1))
+ *   .execute()
+ *
+ * // results[0] contains the person query results
+ * // results[1] contains the pet query results
+ * ```
+ *
+ * With type-safe destructuring:
+ *
+ * ```ts
+ * const [persons, pets] = await db
+ *   .batch()
+ *   .add(db.selectFrom('person').selectAll())
+ *   .add(db.selectFrom('pet').selectAll())
+ *   .execute()
+ *
+ * // persons is typed as Person[]
+ * // pets is typed as Pet[]
+ * ```
+ */
+export class BatchBuilder<R extends unknown[][] = []> {
+  readonly #props: BatchBuilderProps
+  readonly #queries: CompiledQuery[]
+
+  constructor(props: BatchBuilderProps, queries: CompiledQuery[] = []) {
+    this.#props = freeze(props)
+    this.#queries = queries
+  }
+
+  /**
+   * Adds a query to the batch.
+   *
+   * The query can be any compilable query builder or a pre-compiled query.
+   *
+   * ### Examples
+   *
+   * ```ts
+   * const batch = db
+   *   .batch()
+   *   .add(db.selectFrom('person').selectAll())
+   *   .add(db.updateTable('person').set({ active: true }).where('id', '=', 1))
+   *   .add(db.deleteFrom('pet').where('id', '=', 123))
+   * ```
+   */
+  add<O>(query: Compilable<O> | CompiledQuery<O>): BatchBuilder<[...R, O[]]> {
+    const compiledQuery = isCompilable(query) ? query.compile() : query
+    return new BatchBuilder<[...R, O[]]>(this.#props, [
+      ...this.#queries,
+      compiledQuery,
+    ])
+  }
+
+  /**
+   * Executes all queries in the batch.
+   *
+   * Returns an array of results in the same order as the queries were added.
+   * Each result contains the rows returned by that query.
+   *
+   * ### Examples
+   *
+   * ```ts
+   * const [persons, pets, toys] = await db
+   *   .batch()
+   *   .add(db.selectFrom('person').selectAll())
+   *   .add(db.selectFrom('pet').selectAll())
+   *   .add(db.selectFrom('toy').selectAll())
+   *   .execute()
+   * ```
+   */
+  async execute(): Promise<{ [K in keyof R]: R[K] }> {
+    if (this.#queries.length === 0) {
+      return [] as { [K in keyof R]: R[K] }
+    }
+
+    const results = await this.#props.executor.executeBatch(this.#queries)
+
+    return results.map((result) => result.rows) as { [K in keyof R]: R[K] }
+  }
+}
diff --git a/src/query-executor/query-executor-base.ts b/src/query-executor/query-executor-base.ts
index 4f07771ef..e1a87e5db 100644
--- a/src/query-executor/query-executor-base.ts
+++ b/src/query-executor/query-executor-base.ts
@@ -92,6 +92,30 @@ export abstract class QueryExecutorBase implements QueryExecutor {
     }
   }
 
+  async executeBatch<R>(
+    compiledQueries: ReadonlyArray<CompiledQuery>,
+  ): Promise<QueryResult<R>[]> {
+    return await this.provideConnection(async (connection) => {
+      if (!this.adapter.supportsBatch || !connection.executeBatch) {
+        throw new Error('Batching is not supported by this dialect')
+      }
+
+      const results = await connection.executeBatch(compiledQueries)
+
+      const transformedResults = []
+      for (let i = 0; i < results.length; i++) {
+        const transformed = await this.#transformResult(
+          results[i],
+          compiledQueries[i].queryId,
+        )
+        transformedResults.push(transformed)
+      }
+
+      // Cast is safe because we know the results are QueryResult<R>
+      return transformedResults as QueryResult<R>[]
+    })
+  }
+
   abstract withConnectionProvider(
     connectionProvider: ConnectionProvider,
   ): QueryExecutorBase
diff --git a/src/query-executor/query-executor.ts b/src/query-executor/query-executor.ts
index 0c5a740f5..01725a45c 100644
--- a/src/query-executor/query-executor.ts
+++ b/src/query-executor/query-executor.ts
@@ -60,6 +60,20 @@ export interface QueryExecutor extends ConnectionProvider {
     chunkSize: number,
   ): AsyncIterableIterator<QueryResult<R>>
 
+  /**
+   * Executes multiple compiled queries as a batch.
+   *
+   * If the dialect supports batching (adapter.supportsBatch is true), queries
+   * will be executed using the connection's batch execution method. Otherwise,
+   * queries will be executed sequentially.
+   *
+   * Results are returned in the same order as the input queries, with each
+   * result transformed by the plugins' `transformResult` method.
+   */
+  executeBatch<R>(
+    compiledQueries: ReadonlyArray<CompiledQuery<unknown>>,
+  ): Promise<QueryResult<R>[]>
+
   /**
    * Returns a copy of this executor with a new connection provider.
    */
diff --git a/test/node/src/batch.test.ts b/test/node/src/batch.test.ts
new file mode 100644
index 000000000..61e08850e
--- /dev/null
+++ b/test/node/src/batch.test.ts
@@ -0,0 +1,328 @@
+import {
+  clearDatabase,
+  destroyTest,
+  initTest,
+  TestContext,
+  expect,
+  insertDefaultDataSet,
+  DIALECTS,
+} from './test-setup.js'
+
+for (const dialect of DIALECTS) {
+  describe(`${dialect}: batch`, () => {
+    let ctx: TestContext
+
+    before(async function () {
+      ctx = await initTest(this, dialect)
+    })
+
+    beforeEach(async () => {
+      await insertDefaultDataSet(ctx)
+    })
+
+    afterEach(async () => {
+      await clearDatabase(ctx)
+    })
+
+    after(async () => {
+      await destroyTest(ctx)
+    })
+
+    if (dialect !== 'postgres') {
+      it('should throw an error when batch is called', () => {
+        expect(() => ctx.db.batch()).to.throw(
+          'batch execution is not supported by this dialect',
+        )
+      })
+
+      it('should check adapter supportsBatch flag is false', () => {
+        const adapter = ctx.db.getExecutor().adapter
+        expect(adapter.supportsBatch).to.equal(false)
+      })
+
+      return
+    }
+
+    // PostgreSQL-specific batch tests below
+
+    it('should execute multiple select queries in a batch', async () => {
+      const [persons, pets] = await ctx.db
+        .batch()
+        .add(ctx.db.selectFrom('person').selectAll().orderBy('first_name'))
.add(ctx.db.selectFrom('pet').selectAll().orderBy('name')) + .execute() + + expect(persons).to.have.length(3) + expect(pets).to.have.length(3) + + expect(persons.map((p) => p.first_name)).to.eql([ + 'Arnold', + 'Jennifer', + 'Sylvester', + ]) + expect(pets.map((p) => p.name)).to.eql(['Catto', 'Doggo', 'Hammo']) + }) + + it('should execute a single query in a batch', async () => { + const [persons] = await ctx.db + .batch() + .add(ctx.db.selectFrom('person').selectAll()) + .execute() + + expect(persons).to.have.length(3) + }) + + it('should execute mixed query types in a batch', async () => { + const [selectedPersons, insertResult, updateResult] = await ctx.db + .batch() + .add( + ctx.db + .selectFrom('person') + .selectAll() + .where('first_name', '=', 'Jennifer'), + ) + .add( + ctx.db.insertInto('person').values({ + first_name: 'New', + last_name: 'Person', + gender: 'other', + }), + ) + .add( + ctx.db + .updateTable('person') + .set({ last_name: 'Updated' }) + .where('first_name', '=', 'Arnold'), + ) + .execute() + + expect(selectedPersons).to.have.length(1) + expect(selectedPersons[0].first_name).to.equal('Jennifer') + + // Check insert and update worked (no returning clause, so empty arrays) + expect(insertResult).to.be.an('array').and.to.have.length(0) + expect(updateResult).to.be.an('array').and.to.have.length(0) + + // Verify the changes + const allPersons = await ctx.db + .selectFrom('person') + .selectAll() + .orderBy('first_name') + .execute() + + expect(allPersons).to.have.length(4) + + const newPerson = allPersons.find((p) => p.first_name === 'New') + void expect(newPerson !== undefined).to.be.true + + expect( + allPersons.find((p) => p.first_name === 'Arnold')?.last_name, + ).to.equal('Updated') + }) + + it('should execute delete queries in a batch', async () => { + const [deleteResult] = await ctx.db + .batch() + .add(ctx.db.deleteFrom('pet').where('name', '=', 'Catto')) + .execute() + + expect(deleteResult).to.have.length(0) + + const remainingPets = 
await ctx.db + .selectFrom('pet') + .selectAll() + .orderBy('name') + .execute() + + expect(remainingPets).to.have.length(2) + expect(remainingPets.map((p) => p.name)).to.eql(['Doggo', 'Hammo']) + }) + + it('should handle empty batch', async () => { + const results = await ctx.db.batch().execute() + + expect(results).to.have.length(0) + }) + + it('should execute queries with where clauses', async () => { + const [arnoldPets, jenniferPets] = await ctx.db + .batch() + .add( + ctx.db + .selectFrom('pet') + .selectAll() + .where( + 'owner_id', + '=', + ctx.db + .selectFrom('person') + .select('id') + .where('first_name', '=', 'Arnold') + .limit(1), + ), + ) + .add( + ctx.db + .selectFrom('pet') + .selectAll() + .where( + 'owner_id', + '=', + ctx.db + .selectFrom('person') + .select('id') + .where('first_name', '=', 'Jennifer') + .limit(1), + ), + ) + .execute() + + expect(arnoldPets).to.have.length(1) + expect(jenniferPets).to.have.length(1) + }) + + it('should execute queries with compiled queries', async () => { + const query1 = ctx.db + .selectFrom('person') + .selectAll() + .where('first_name', '=', 'Arnold') + .compile() + + const query2 = ctx.db + .selectFrom('pet') + .selectAll() + .where('name', '=', 'Doggo') + .compile() + + const [persons, pets] = await ctx.db + .batch() + .add(query1) + .add(query2) + .execute() + + expect(persons).to.have.length(1) + expect(pets).to.have.length(1) + }) + + it('should work with returning clauses', async () => { + const [insertedPersons, updatedPersons] = await ctx.db + .batch() + .add( + ctx.db + .insertInto('person') + .values({ + first_name: 'Batch', + last_name: 'Test', + gender: 'other', + }) + .returningAll(), + ) + .add( + ctx.db + .updateTable('person') + .set({ last_name: 'BatchUpdated' }) + .where('first_name', '=', 'Arnold') + .returningAll(), + ) + .execute() + + expect(insertedPersons).to.have.length(1) + expect(insertedPersons[0].first_name).to.equal('Batch') + 
expect(insertedPersons[0].last_name).to.equal('Test') + + expect(updatedPersons).to.have.length(1) + expect(updatedPersons[0].first_name).to.equal('Arnold') + expect(updatedPersons[0].last_name).to.equal('BatchUpdated') + }) + + it('should work within a transaction', async () => { + await ctx.db.transaction().execute(async (trx) => { + // Batch should not be callable on a transaction + expect(() => { + ;(trx as any).batch() + }).to.throw( + 'calling the batch method for a Transaction is not supported', + ) + }) + }) + + it('should preserve query order in results', async () => { + const [result1, result2, result3] = await ctx.db + .batch() + .add( + ctx.db + .selectFrom('person') + .select('first_name') + .where('first_name', '=', 'Arnold'), + ) + .add( + ctx.db + .selectFrom('person') + .select('first_name') + .where('first_name', '=', 'Jennifer'), + ) + .add( + ctx.db + .selectFrom('person') + .select('first_name') + .where('first_name', '=', 'Sylvester'), + ) + .execute() + + expect(result1[0].first_name).to.equal('Arnold') + expect(result2[0].first_name).to.equal('Jennifer') + expect(result3[0].first_name).to.equal('Sylvester') + }) + + it('should handle queries that return no results', async () => { + const [emptyResult, nonEmptyResult] = await ctx.db + .batch() + .add( + ctx.db + .selectFrom('person') + .selectAll() + .where('first_name', '=', 'NonExistent'), + ) + .add(ctx.db.selectFrom('person').selectAll()) + .execute() + + expect(emptyResult).to.have.length(0) + expect(nonEmptyResult).to.have.length(3) + }) + + it('should work with complex queries', async () => { + const [joinResult, aggregateResult] = await ctx.db + .batch() + .add( + ctx.db + .selectFrom('person') + .innerJoin('pet', 'pet.owner_id', 'person.id') + .select(['person.first_name', 'pet.name as pet_name']) + .orderBy('person.first_name') + .orderBy('pet.name'), + ) + .add( + ctx.db + .selectFrom('person') + .select((eb) => [ + 'first_name', + eb.fn.count('id').as('person_count'), + ]) + 
.groupBy('first_name') + .orderBy('first_name'), + ) + .execute() + + expect(joinResult.length).to.be.greaterThan(0) + expect(joinResult[0]).to.have.property('pet_name') + + expect(aggregateResult).to.have.length(3) + expect(aggregateResult[0]).to.have.property('person_count') + }) + + it('should check adapter supportsBatch flag', () => { + const adapter = ctx.db.getExecutor().adapter + expect(adapter.supportsBatch).to.equal(true) + }) + }) +} diff --git a/test/typings/test-d/batch.test-d.ts b/test/typings/test-d/batch.test-d.ts new file mode 100644 index 000000000..18e27635d --- /dev/null +++ b/test/typings/test-d/batch.test-d.ts @@ -0,0 +1,295 @@ +import { expectError, expectType } from 'tsd' +import { + Kysely, + type DeleteResult, + type InsertResult, + type UpdateResult, +} from '..' +import { Database } from '../shared' + +void async function testBasicBatch(db: Kysely) { + // Two select queries - should preserve tuple types + const [persons, pets] = await db + .batch() + .add(db.selectFrom('person').selectAll()) + .add(db.selectFrom('pet').selectAll()) + .execute() + + expectType< + { + id: number + first_name: string + last_name: string | null + gender: 'male' | 'female' | 'other' + age: number + modified_at: Date + marital_status: 'single' | 'married' | 'divorced' | 'widowed' | null + deleted_at: Date | null + }[] + >(persons) + + expectType< + { + id: string + name: string + owner_id: number + species: 'dog' | 'cat' + }[] + >(pets) +} + +void async function testSingleQueryBatch(db: Kysely) { + // Single query should still return a tuple with one element + const [result] = await db + .batch() + .add(db.selectFrom('person').select(['id', 'first_name'])) + .execute() + + expectType<{ id: number; first_name: string }[]>(result) +} + +void async function testMixedQueryTypes(db: Kysely) { + // Mix of select, insert, update, delete + const [selected, inserted, updated, deleted] = await db + .batch() + .add(db.selectFrom('person').select('id')) + .add( + 
db.insertInto('person').values({ + first_name: 'Test', + gender: 'other', + age: 25, + }), + ) + .add(db.updateTable('person').set({ age: 30 }).where('id', '=', 1)) + .add(db.deleteFrom('pet').where('id', '=', '1')) + .execute() + + expectType<{ id: number }[]>(selected) + expectType(inserted) + expectType(updated) + expectType(deleted) +} + +void async function testBatchWithReturning(db: Kysely) { + const [inserted, updated] = await db + .batch() + .add( + db + .insertInto('person') + .values({ + first_name: 'John', + gender: 'male', + age: 30, + }) + .returning(['id', 'first_name']), + ) + .add( + db + .updateTable('person') + .set({ age: 31 }) + .where('id', '=', 1) + .returningAll(), + ) + .execute() + + expectType<{ id: number; first_name: string }[]>(inserted) + expectType< + { + id: number + first_name: string + last_name: string | null + gender: 'male' | 'female' | 'other' + age: number + modified_at: Date + marital_status: 'single' | 'married' | 'divorced' | 'widowed' | null + deleted_at: Date | null + }[] + >(updated) +} + +void async function testBatchWithCompiledQueries(db: Kysely) { + const query1 = db.selectFrom('person').select(['id', 'first_name']).compile() + + const query2 = db.selectFrom('pet').selectAll().compile() + + const [persons, pets] = await db.batch().add(query1).add(query2).execute() + + expectType<{ id: number; first_name: string }[]>(persons) + expectType< + { + id: string + name: string + owner_id: number + species: 'dog' | 'cat' + }[] + >(pets) +} + +void async function testEmptyBatch(db: Kysely) { + const result = await db.batch().execute() + + expectType<[]>(result) +} + +void async function testBatchWithSubqueries(db: Kysely) { + const [result] = await db + .batch() + .add( + db + .selectFrom('person') + .select((eb) => [ + 'id', + eb + .selectFrom('pet') + .select('name') + .whereRef('pet.owner_id', '=', 'person.id') + .limit(1) + .as('pet_name'), + ]), + ) + .execute() + + expectType<{ id: number; pet_name: string | null 
}[]>(result) +} + +void async function testBatchWithJoins(db: Kysely) { + const [result] = await db + .batch() + .add( + db + .selectFrom('person') + .innerJoin('pet', 'pet.owner_id', 'person.id') + .select(['person.first_name', 'pet.name as pet_name']), + ) + .execute() + + expectType<{ first_name: string; pet_name: string }[]>(result) +} + +void async function testBatchTypeInference(db: Kysely) { + // Test that adding queries incrementally preserves types + const batch1 = db.batch() + const batch2 = batch1.add(db.selectFrom('person').select('id')) + const batch3 = batch2.add(db.selectFrom('pet').select('name')) + const batch4 = batch3.add(db.selectFrom('movie').select(['id', 'stars'])) + + const [persons, pets, movies] = await batch4.execute() + + expectType<{ id: number }[]>(persons) + expectType<{ name: string }[]>(pets) + expectType<{ id: string; stars: number }[]>(movies) +} + +void async function testBatchWithExpressions(db: Kysely) { + const [result] = await db + .batch() + .add( + db + .selectFrom('person') + .select((eb) => [ + 'id', + eb.fn.count('id').as('count'), + eb.fn.max('age').as('max_age'), + ]) + .groupBy('id'), + ) + .execute() + + expectType< + { id: number; max_age: number; count: number | bigint | string }[] + >(result) +} + +void async function testBatchErrors(db: Kysely) { + // Non-existent table + expectError( + db.batch().add(db.selectFrom('not_a_table').selectAll()).execute(), + ) + + // Non-existent column + expectError( + db.batch().add(db.selectFrom('person').select('not_a_column')).execute(), + ) + + // Type mismatch in values + expectError( + db + .batch() + .add( + db.insertInto('person').values({ + first_name: 123, // Should be string + gender: 'other', + age: 30, + }), + ) + .execute(), + ) + + // Missing required columns + expectError( + db + .batch() + .add( + db.insertInto('person').values({ + first_name: 'Test', + // Missing gender and age + }), + ) + .execute(), + ) +} + +void async function testBatchWithCTE(db: Kysely) { + 
const [result] = await db + .batch() + .add( + db + .with('top_persons', (db) => + db.selectFrom('person').select(['id', 'first_name']).limit(10), + ) + .selectFrom('top_persons') + .selectAll(), + ) + .execute() + + expectType<{ id: number; first_name: string }[]>(result) +} + +void async function testLongBatchChain(db: Kysely) { + // Test with many queries to ensure tuple types scale + const [r1, r2, r3, r4, r5] = await db + .batch() + .add(db.selectFrom('person').select('id')) + .add(db.selectFrom('pet').select('name')) + .add(db.selectFrom('movie').select('stars')) + .add(db.selectFrom('person').select(['first_name', 'last_name'])) + .add(db.selectFrom('pet').select(['id', 'species'])) + .execute() + + expectType<{ id: number }[]>(r1) + expectType<{ name: string }[]>(r2) + expectType<{ stars: number }[]>(r3) + expectType<{ first_name: string; last_name: string | null }[]>(r4) + expectType<{ id: string; species: 'dog' | 'cat' }[]>(r5) +} + +void async function testBatchWithDynamicReferences(db: Kysely) { + const { ref } = db.dynamic + + const [result] = await db + .batch() + .add( + db + .selectFrom('person') + .select(['id', ref<'first_name' | 'last_name'>('first_name')]), + ) + .execute() + + expectType< + { + id: number + first_name: string | undefined + last_name: string | null | undefined + }[] + >(result) +}