diff --git a/docs/docs/plugins/index.md b/docs/docs/plugins/index.md index f0e4b51d..fc38f0bc 100644 --- a/docs/docs/plugins/index.md +++ b/docs/docs/plugins/index.md @@ -13,7 +13,7 @@ For complete API documentation, see the [`Plugin`](../api/appkit/Class.Plugin.md Configure plugins when creating your AppKit instance: ```typescript -import { createApp, server, analytics, genie, files } from "@databricks/appkit"; +import { createApp, server, analytics, genie, files, proto, jobs } from "@databricks/appkit"; const AppKit = await createApp({ plugins: [ @@ -21,6 +21,8 @@ const AppKit = await createApp({ analytics(), genie(), files(), + proto(), + jobs(), ], }); ``` diff --git a/docs/docs/plugins/jobs.md b/docs/docs/plugins/jobs.md new file mode 100644 index 00000000..ab100469 --- /dev/null +++ b/docs/docs/plugins/jobs.md @@ -0,0 +1,211 @@ +--- +sidebar_position: 9 +--- + +# Jobs plugin + +Databricks Jobs API integration for submitting, monitoring, and managing job runs. Wraps the Jobs REST API with typed methods, integrated telemetry, retry, and timeout support. 
+ +**Key features:** +- **Submit one-time runs** — launch notebook, Python, JAR, or SQL tasks without creating a persistent job +- **Trigger existing jobs** — run pre-defined jobs with optional parameter overrides +- **Poll for completion** — `waitForRun` polls with exponential backoff until terminal state +- **Full lifecycle management** — create, get, list, cancel runs and jobs + +## Basic usage + +```ts +import { createApp, jobs, server } from "@databricks/appkit"; + +const app = await createApp({ + plugins: [ + server(), + jobs(), + ], +}); +``` + +## Submit and wait for a run + +```ts +// Submit a one-time notebook run +const { run_id } = await app.jobs.submitRun({ + run_name: "daily-analysis", + tasks: [{ + task_key: "main", + notebook_task: { + notebook_path: "/Users/me/notebooks/analysis", + }, + }], +}); + +// Poll until the run reaches a terminal state +const run = await app.jobs.waitForRun(run_id); +console.log(run.state?.result_state); // "SUCCESS" +``` + +## Trigger an existing job + +```ts +// Run a pre-defined job by ID with parameter overrides +const { run_id } = await app.jobs.runNow({ + job_id: 12345, + notebook_params: { + date: "2025-01-15", + mode: "full-refresh", + }, +}); + +const run = await app.jobs.waitForRun(run_id); +``` + +## Poll for status + +```ts +// Check run state without blocking +const run = await app.jobs.getRun(run_id); + +switch (run.state?.life_cycle_state) { + case "PENDING": + case "RUNNING": + console.log("Still running..."); + break; + case "TERMINATED": + console.log("Result:", run.state.result_state); + break; + case "SKIPPED": + console.log("Run was skipped"); + break; + case "INTERNAL_ERROR": + console.error("Internal error:", run.state.state_message); + break; +} +``` + +## Get task output + +```ts +const output = await app.jobs.getRunOutput(run_id); +console.log(output.notebook_output?.result); +``` + +## Cancel a run + +```ts +await app.jobs.cancelRun(run_id); +``` + +## List runs for a job + +```ts +const runs = 
await app.jobs.listRuns({ + job_id: 12345, + active_only: true, +}); + +for (const run of runs) { + console.log(`Run ${run.run_id}: ${run.state?.life_cycle_state}`); +} +``` + +## Create a job + +```ts +const { job_id } = await app.jobs.createJob({ + name: "nightly-etl", + tasks: [{ + task_key: "extract", + notebook_task: { + notebook_path: "/Users/me/notebooks/extract", + }, + }, { + task_key: "transform", + depends_on: [{ task_key: "extract" }], + notebook_task: { + notebook_path: "/Users/me/notebooks/transform", + }, + }], + schedule: { + quartz_cron_expression: "0 0 2 * * ?", + timezone_id: "America/Los_Angeles", + }, +}); +``` + +## Combining with other plugins + +### Jobs + Files: upload artifacts for job input + +```ts +import { createApp, jobs, files, server } from "@databricks/appkit"; + +const app = await createApp({ + plugins: [server(), jobs(), files()], +}); + +// Upload input data to a UC Volume +await app.files("staging").upload("input/params.json", Buffer.from( + JSON.stringify({ threshold: 0.95, mode: "strict" }), +)); + +// Submit a run that reads from the volume +const { run_id } = await app.jobs.submitRun({ + run_name: "parameterized-run", + tasks: [{ + task_key: "main", + notebook_task: { + notebook_path: "/Users/me/notebooks/process", + base_parameters: { + config_path: "/Volumes/catalog/schema/staging/input/params.json", + }, + }, + }], +}); +``` + +### Jobs + Lakebase: store results in database + +```ts +import { createApp, jobs, lakebase, server } from "@databricks/appkit"; + +const app = await createApp({ + plugins: [server(), jobs(), lakebase()], +}); + +// Wait for job to finish, then query results +const run = await app.jobs.waitForRun(run_id); +if (run.state?.result_state === "SUCCESS") { + const rows = await app.lakebase.query( + "SELECT * FROM results WHERE run_id = $1", + [run_id], + ); + console.log(`Job produced ${rows.length} result rows`); +} +``` + +## API reference + +| Method | Description | +| --- | --- | +| `submitRun(req)` 
| Submit a one-time run without creating a job | +| `runNow(req)` | Trigger an existing job by ID | +| `getRun(runId)` | Get run metadata and state | +| `getRunOutput(runId)` | Get task output (notebook result, logs) | +| `cancelRun(runId)` | Cancel a running or pending run | +| `listRuns(req)` | List runs for a job (supports filtering) | +| `getJob(jobId)` | Get job definition and settings | +| `createJob(req)` | Create a new job with tasks and schedule | +| `waitForRun(runId, timeoutMs?)` | Poll until terminal state (TERMINATED, SKIPPED, INTERNAL_ERROR) | + +## Configuration + +```ts +jobs() // Defaults: 60s timeout, 5s poll interval +jobs({ timeout: 120000 }) // 2 minute API timeout +jobs({ pollIntervalMs: 10000 }) // 10 second poll interval for waitForRun +``` + +| Option | Type | Default | Description | +| --- | --- | --- | --- | +| `timeout` | `number` | `60000` | Default timeout for Jobs API calls (ms) | +| `pollIntervalMs` | `number` | `5000` | Poll interval for `waitForRun` (ms) | diff --git a/docs/docs/plugins/proto.md b/docs/docs/plugins/proto.md new file mode 100644 index 00000000..dec03644 --- /dev/null +++ b/docs/docs/plugins/proto.md @@ -0,0 +1,252 @@ +--- +sidebar_position: 8 +--- + +# Proto plugin + +Typed data contracts via protobuf. Define your data shapes once in `.proto` files, generate TypeScript types, and use them across plugins, routes, and jobs — no more ad-hoc interfaces that drift between producer and consumer. 
+ +**Key features:** +- **Single schema definition** — one `.proto` file generates types for all consumers +- **Binary + JSON serialization** — efficient binary for storage, JSON for APIs +- **Type-safe create** — construct messages with compile-time field validation +- **Interop with other plugins** — serialize to bytes, pass to Files plugin for Volume I/O; serialize to JSON, pass to Analytics plugin for SQL; serialize to binary, send over gRPC + +## Basic usage + +```ts +import { createApp, proto, server } from "@databricks/appkit"; + +const app = await createApp({ + plugins: [ + server(), + proto(), + ], +}); +``` + +## Defining contracts + +Create `.proto` files in your project: + +```protobuf +// proto/myapp/v1/models.proto +syntax = "proto3"; +package myapp.v1; + +message Customer { + string id = 1; + string name = 2; + string email = 3; + double lifetime_value = 4; + bool is_active = 5; +} + +message Order { + string order_id = 1; + string customer_id = 2; + double total = 3; + repeated OrderItem items = 4; +} + +message OrderItem { + string product_id = 1; + string name = 2; + int32 quantity = 3; + double unit_price = 4; +} +``` + +Generate TypeScript types: + +```bash +npx buf generate proto/ +``` + +This produces typed interfaces like `Customer`, `Order`, `OrderItem` with schemas like `CustomerSchema`, `OrderSchema`. 
+ +## Creating messages + +```ts +import { CustomerSchema } from "../proto/gen/myapp/v1/models_pb.js"; + +// Type-safe — unknown fields are compile errors +const customer = app.proto.create(CustomerSchema, { + id: "cust-001", + name: "Acme Corp", + email: "billing@acme.com", + lifetimeValue: 15_230.50, + isActive: true, +}); +``` + +## Serialization + +### Binary (compact, for storage and transfer) + +```ts +const bytes = app.proto.serialize(CustomerSchema, customer); +// bytes: Uint8Array — pass to Files plugin, store in database, send over network + +const recovered = app.proto.deserialize(CustomerSchema, bytes); +// recovered.name === "Acme Corp" +``` + +### JSON (human-readable, for APIs and logging) + +```ts +const json = app.proto.toJSON(CustomerSchema, customer); +// { "id": "cust-001", "name": "Acme Corp", "email": "billing@acme.com", +// "lifetimeValue": 15230.5, "isActive": true } + +const fromApi = app.proto.fromJSON(CustomerSchema, requestBody); +``` + +## Combining with other plugins + +### Proto + Files: typed Volume I/O + +```ts +import { createApp, proto, files, server } from "@databricks/appkit"; + +const app = await createApp({ + plugins: [server(), proto(), files()], +}); + +// Serialize a message and upload to a UC Volume +const bytes = app.proto.serialize(OrderSchema, order); +await app.files("reports").upload("orders/latest.bin", Buffer.from(bytes)); + +// Download and deserialize +const data = await app.files("reports").download("orders/latest.bin"); +const loaded = app.proto.deserialize(OrderSchema, new Uint8Array(data)); +``` + +### Proto + Lakebase: typed database rows + +```ts +import { createApp, proto, lakebase, server } from "@databricks/appkit"; + +const app = await createApp({ + plugins: [server(), proto(), lakebase()], +}); + +// Convert proto message to JSON for SQL insert +const json = app.proto.toJSON(CustomerSchema, customer); +await app.lakebase.query( + `INSERT INTO customers (id, name, email, lifetime_value, is_active) + 
VALUES ($1, $2, $3, $4, $5)`, + [json.id, json.name, json.email, json.lifetimeValue, json.isActive], +); +``` + +### Proto + Analytics: typed query results + +```ts +// Parse SQL query results into typed proto messages +const rows = await app.analytics.query("top-customers", { minValue: 1000 }); +const customers = rows.map((row) => + app.proto.fromJSON(CustomerSchema, row), +); +``` + +## API routes with typed contracts + +```ts +import express from "express"; + +app.server.extend((expressApp) => { + expressApp.get("/api/customers/:id", async (req, res) => { + const row = await app.lakebase.query( + "SELECT * FROM customers WHERE id = $1", + [req.params.id], + ); + if (!row.length) return res.status(404).json({ error: "Not found" }); + + // Parse to proto and back to JSON — guarantees the response + // matches the contract even if the DB has extra columns + const customer = app.proto.fromJSON(CustomerSchema, row[0]); + res.json(app.proto.toJSON(CustomerSchema, customer)); + }); + + expressApp.post("/api/orders", express.json(), async (req, res) => { + // Validate request body against the proto schema + const order = app.proto.fromJSON(OrderSchema, req.body); + // order is now typed — order.items, order.total, etc. + + const bytes = app.proto.serialize(OrderSchema, order); + await app.files("orders").upload( + `${order.orderId}.bin`, + Buffer.from(bytes), + ); + + res.status(201).json(app.proto.toJSON(OrderSchema, order)); + }); +}); +``` + +## Proto setup with buf + +Install buf and protoc-gen-es: + +```bash +pnpm add -D @bufbuild/buf @bufbuild/protoc-gen-es @bufbuild/protobuf +``` + +Create `proto/buf.yaml`: + +```yaml +version: v2 +modules: + - path: . 
+lint: + use: + - STANDARD +``` + +Create `proto/buf.gen.yaml`: + +```yaml +version: v2 +plugins: + - local: protoc-gen-es + out: proto/gen + opt: + - target=ts +``` + +Generate types: + +```bash +npx buf generate proto/ +``` + +Add to your build: + +```json +{ + "scripts": { + "proto:generate": "buf generate proto/", + "proto:lint": "buf lint proto/", + "prebuild": "pnpm proto:generate" + } +} +``` + +## API reference + +| Method | Description | +| --- | --- | +| `create(schema, init?)` | Create a new proto message with optional initial values | +| `serialize(schema, message)` | Serialize to binary (`Uint8Array`) | +| `deserialize(schema, data)` | Deserialize from binary | +| `toJSON(schema, message)` | Convert to JSON (snake_case field names) | +| `fromJSON(schema, json)` | Parse from JSON | + +## Configuration + +The proto plugin requires no configuration: + +```ts +proto() // That's it +``` diff --git a/knip.json b/knip.json index fae5b9c1..6dc80628 100644 --- a/knip.json +++ b/knip.json @@ -15,7 +15,8 @@ "**/*.example.tsx", "template/**", "tools/**", - "docs/**" + "docs/**", + "**/tests/scenario/**" ], "ignoreBinaries": ["tarball"] } diff --git a/packages/appkit/package.json b/packages/appkit/package.json index 471e168d..1a8f3d77 100644 --- a/packages/appkit/package.json +++ b/packages/appkit/package.json @@ -75,7 +75,8 @@ "semver": "7.7.3", "shared": "workspace:*", "vite": "npm:rolldown-vite@7.1.14", - "ws": "8.18.3" + "ws": "8.18.3", + "@bufbuild/protobuf": "^2.3.0" }, "devDependencies": { "@types/express": "4.17.25", diff --git a/packages/appkit/src/connectors/index.ts b/packages/appkit/src/connectors/index.ts index 41e7748c..60e1728a 100644 --- a/packages/appkit/src/connectors/index.ts +++ b/packages/appkit/src/connectors/index.ts @@ -1,5 +1,6 @@ export * from "./files"; export * from "./genie"; +export * from "./jobs"; export * from "./lakebase"; export * from "./lakebase-v1"; export * from "./sql-warehouse"; diff --git 
a/packages/appkit/src/connectors/jobs/client.ts b/packages/appkit/src/connectors/jobs/client.ts new file mode 100644 index 00000000..4b071cba --- /dev/null +++ b/packages/appkit/src/connectors/jobs/client.ts @@ -0,0 +1,308 @@ +import { + Context, + type jobs, + type WorkspaceClient, +} from "@databricks/sdk-experimental"; +import { AppKitError, ExecutionError } from "../../errors"; +import { createLogger } from "../../logging/logger"; +import type { TelemetryProvider } from "../../telemetry"; +import { + type Counter, + type Histogram, + type Span, + SpanKind, + SpanStatusCode, + TelemetryManager, +} from "../../telemetry"; +import type { JobsConnectorConfig } from "./types"; + +const logger = createLogger("connectors:jobs"); + +export class JobsConnector { + private readonly name = "jobs"; + private readonly config: JobsConnectorConfig; + private readonly telemetry: TelemetryProvider; + private readonly telemetryMetrics: { + apiCallCount: Counter; + apiCallDuration: Histogram; + }; + + constructor(config: JobsConnectorConfig) { + this.config = config; + this.telemetry = TelemetryManager.getProvider( + this.name, + this.config.telemetry, + ); + this.telemetryMetrics = { + apiCallCount: this.telemetry + .getMeter() + .createCounter("jobs.api_call.count", { + description: "Total number of Jobs API calls", + unit: "1", + }), + apiCallDuration: this.telemetry + .getMeter() + .createHistogram("jobs.api_call.duration", { + description: "Duration of Jobs API calls", + unit: "ms", + }), + }; + } + + async submitRun( + workspaceClient: WorkspaceClient, + request: jobs.SubmitRun, + signal?: AbortSignal, + ): Promise { + return this._callApi("submit", async () => { + const waiter = await workspaceClient.jobs.submit( + request, + this._createContext(signal), + ); + return waiter.response; + }); + } + + async runNow( + workspaceClient: WorkspaceClient, + request: jobs.RunNow, + signal?: AbortSignal, + ): Promise { + return this._callApi("runNow", async () => { + const waiter = 
await workspaceClient.jobs.runNow( + request, + this._createContext(signal), + ); + return waiter.response; + }); + } + + async getRun( + workspaceClient: WorkspaceClient, + request: jobs.GetRunRequest, + signal?: AbortSignal, + ): Promise { + return this._callApi("getRun", async () => { + return workspaceClient.jobs.getRun(request, this._createContext(signal)); + }); + } + + async getRunOutput( + workspaceClient: WorkspaceClient, + request: jobs.GetRunOutputRequest, + signal?: AbortSignal, + ): Promise { + return this._callApi("getRunOutput", async () => { + return workspaceClient.jobs.getRunOutput( + request, + this._createContext(signal), + ); + }); + } + + async cancelRun( + workspaceClient: WorkspaceClient, + request: jobs.CancelRun, + signal?: AbortSignal, + ): Promise { + await this._callApi("cancelRun", async () => { + const waiter = await workspaceClient.jobs.cancelRun( + request, + this._createContext(signal), + ); + return waiter.response; + }); + } + + async listRuns( + workspaceClient: WorkspaceClient, + request: jobs.ListRunsRequest, + signal?: AbortSignal, + ): Promise { + return this._callApi("listRuns", async () => { + const runs: jobs.BaseRun[] = []; + for await (const run of workspaceClient.jobs.listRuns( + request, + this._createContext(signal), + )) { + runs.push(run); + } + return runs; + }); + } + + async getJob( + workspaceClient: WorkspaceClient, + request: jobs.GetJobRequest, + signal?: AbortSignal, + ): Promise { + return this._callApi("getJob", async () => { + return workspaceClient.jobs.get(request, this._createContext(signal)); + }); + } + + async createJob( + workspaceClient: WorkspaceClient, + request: jobs.CreateJob, + signal?: AbortSignal, + ): Promise { + return this._callApi("createJob", async () => { + return workspaceClient.jobs.create(request, this._createContext(signal)); + }); + } + + async waitForRun( + workspaceClient: WorkspaceClient, + runId: number, + pollIntervalMs = 5000, + timeoutMs?: number, + signal?: AbortSignal, 
+ ): Promise { + const startTime = Date.now(); + const timeout = timeoutMs ?? this.config.timeout ?? 600000; + + return this.telemetry.startActiveSpan( + "jobs.waitForRun", + { + kind: SpanKind.CLIENT, + attributes: { + "jobs.run_id": runId, + "jobs.poll_interval_ms": pollIntervalMs, + "jobs.timeout_ms": timeout, + }, + }, + async (span: Span) => { + try { + let pollCount = 0; + + while (true) { + pollCount++; + const elapsed = Date.now() - startTime; + + if (elapsed > timeout) { + throw ExecutionError.statementFailed( + `Job run ${runId} polling timeout after ${timeout}ms`, + ); + } + + if (signal?.aborted) { + throw ExecutionError.canceled(); + } + + span.addEvent("poll.attempt", { + "poll.count": pollCount, + "poll.elapsed_ms": elapsed, + }); + + const run = await this.getRun( + workspaceClient, + { run_id: runId }, + signal, + ); + + const lifeCycleState = run.state?.life_cycle_state; + + if ( + lifeCycleState === "TERMINATED" || + lifeCycleState === "SKIPPED" || + lifeCycleState === "INTERNAL_ERROR" + ) { + span.setAttribute("jobs.final_state", lifeCycleState); + span.setAttribute( + "jobs.result_state", + run.state?.result_state ?? "", + ); + span.setAttribute("jobs.poll_count", pollCount); + span.setStatus({ code: SpanStatusCode.OK }); + return run; + } + + await new Promise((resolve) => setTimeout(resolve, pollIntervalMs)); + } + } catch (error) { + span.recordException(error as Error); + span.setStatus({ + code: SpanStatusCode.ERROR, + message: error instanceof Error ? error.message : String(error), + }); + if (error instanceof AppKitError) { + throw error; + } + throw ExecutionError.statementFailed( + error instanceof Error ? 
error.message : String(error), + ); + } finally { + span.end(); + } + }, + { name: this.name, includePrefix: true }, + ); + } + + private async _callApi( + operation: string, + fn: () => Promise, + ): Promise { + const startTime = Date.now(); + let success = false; + + return this.telemetry.startActiveSpan( + `jobs.${operation}`, + { + kind: SpanKind.CLIENT, + attributes: { + "jobs.operation": operation, + }, + }, + async (span: Span) => { + try { + const result = await fn(); + success = true; + span.setStatus({ code: SpanStatusCode.OK }); + return result; + } catch (error) { + span.recordException(error as Error); + span.setStatus({ + code: SpanStatusCode.ERROR, + message: error instanceof Error ? error.message : String(error), + }); + if (error instanceof AppKitError) { + throw error; + } + throw ExecutionError.statementFailed( + error instanceof Error ? error.message : String(error), + ); + } finally { + const duration = Date.now() - startTime; + span.end(); + this.telemetryMetrics.apiCallCount.add(1, { + operation, + success: success.toString(), + }); + this.telemetryMetrics.apiCallDuration.record(duration, { + operation, + success: success.toString(), + }); + + logger.event()?.setContext("jobs", { + operation, + duration_ms: duration, + success, + }); + } + }, + { name: this.name, includePrefix: true }, + ); + } + + private _createContext(signal?: AbortSignal) { + return new Context({ + cancellationToken: { + isCancellationRequested: signal?.aborted ?? 
false, + onCancellationRequested: (cb: () => void) => { + signal?.addEventListener("abort", cb, { once: true }); + }, + }, + }); + } +} diff --git a/packages/appkit/src/connectors/jobs/index.ts b/packages/appkit/src/connectors/jobs/index.ts new file mode 100644 index 00000000..efb4753a --- /dev/null +++ b/packages/appkit/src/connectors/jobs/index.ts @@ -0,0 +1,2 @@ +export { JobsConnector } from "./client"; +export type { JobsConnectorConfig } from "./types"; diff --git a/packages/appkit/src/connectors/jobs/types.ts b/packages/appkit/src/connectors/jobs/types.ts new file mode 100644 index 00000000..4e3b15d8 --- /dev/null +++ b/packages/appkit/src/connectors/jobs/types.ts @@ -0,0 +1,6 @@ +import type { TelemetryOptions } from "shared"; + +export interface JobsConnectorConfig { + timeout?: number; + telemetry?: TelemetryOptions; +} diff --git a/packages/appkit/src/index.ts b/packages/appkit/src/index.ts index 8db7f1d7..04c9adbf 100644 --- a/packages/appkit/src/index.ts +++ b/packages/appkit/src/index.ts @@ -15,6 +15,7 @@ export type { } from "shared"; export { isSQLTypeMarker, sql } from "shared"; export { CacheManager } from "./cache"; +export type { JobsConnectorConfig } from "./connectors/jobs"; export type { DatabaseCredential, GenerateDatabaseCredentialRequest, @@ -48,7 +49,17 @@ export { } from "./errors"; // Plugin authoring export { Plugin, type ToPlugin, toPlugin } from "./plugin"; -export { analytics, files, genie, lakebase, server } from "./plugins"; +export { + analytics, + files, + genie, + jobs, + lakebase, + proto, + server, +} from "./plugins"; +export type { IJobsConfig } from "./plugins/jobs"; +export { ProtoSerializer } from "./plugins/proto/serializer"; // Registry types and utilities for plugin manifests export type { ConfigSchema, diff --git a/packages/appkit/src/plugins/index.ts b/packages/appkit/src/plugins/index.ts index 7caa040f..63de1ed9 100644 --- a/packages/appkit/src/plugins/index.ts +++ b/packages/appkit/src/plugins/index.ts @@ -1,5 
+1,7 @@ export * from "./analytics"; export * from "./files"; export * from "./genie"; +export * from "./jobs"; export * from "./lakebase"; +export * from "./proto"; export * from "./server"; diff --git a/packages/appkit/src/plugins/jobs/index.ts b/packages/appkit/src/plugins/jobs/index.ts new file mode 100644 index 00000000..6faf540e --- /dev/null +++ b/packages/appkit/src/plugins/jobs/index.ts @@ -0,0 +1,2 @@ +export { jobs } from "./plugin"; +export type { IJobsConfig } from "./types"; diff --git a/packages/appkit/src/plugins/jobs/manifest.json b/packages/appkit/src/plugins/jobs/manifest.json new file mode 100644 index 00000000..38b586f0 --- /dev/null +++ b/packages/appkit/src/plugins/jobs/manifest.json @@ -0,0 +1,27 @@ +{ + "$schema": "https://databricks.github.io/appkit/schemas/plugin-manifest.schema.json", + "name": "jobs", + "displayName": "Jobs Plugin", + "description": "Databricks Jobs API integration for submitting, monitoring, and managing job runs", + "resources": { + "required": [], + "optional": [] + }, + "config": { + "schema": { + "type": "object", + "properties": { + "timeout": { + "type": "number", + "default": 60000, + "description": "Default timeout for Jobs API calls in milliseconds" + }, + "pollIntervalMs": { + "type": "number", + "default": 5000, + "description": "Poll interval for waiting on run completion in milliseconds" + } + } + } + } +} diff --git a/packages/appkit/src/plugins/jobs/plugin.ts b/packages/appkit/src/plugins/jobs/plugin.ts new file mode 100644 index 00000000..41427d2c --- /dev/null +++ b/packages/appkit/src/plugins/jobs/plugin.ts @@ -0,0 +1,253 @@ +import type { + jobs as jobsTypes, + WorkspaceClient, +} from "@databricks/sdk-experimental"; +import { JobsConnector } from "../../connectors/jobs"; +import { getWorkspaceClient } from "../../context"; +import { createLogger } from "../../logging/logger"; +import { Plugin, toPlugin } from "../../plugin"; +import type { PluginManifest } from "../../registry"; +import manifest 
from "./manifest.json"; +import type { IJobsConfig } from "./types"; + +const logger = createLogger("jobs"); + +/** + * AppKit plugin for Databricks Jobs API. + * + * Provides typed methods for submitting, monitoring, and managing job runs, + * integrated with AppKit's telemetry, error handling, and interceptor chain. + * + * @example + * ```ts + * import { createApp, jobs, server } from "@databricks/appkit"; + * + * const AppKit = await createApp({ + * plugins: [server(), jobs()], + * }); + * + * // Submit a one-time run + * const { run_id } = await AppKit.jobs.submitRun({ + * run_name: "my-analysis", + * tasks: [{ + * task_key: "main", + * notebook_task: { notebook_path: "/Users/me/analysis" }, + * }], + * }); + * + * // Wait for completion + * const run = await AppKit.jobs.waitForRun(run_id); + * console.log(run.state?.result_state); // "SUCCESS" + * ``` + */ +export class JobsPlugin extends Plugin { + static manifest = manifest as PluginManifest<"jobs">; + + protected declare config: IJobsConfig; + private connector: JobsConnector; + + constructor(config: IJobsConfig) { + super(config); + this.config = config; + this.connector = new JobsConnector({ + timeout: config.timeout ?? 60000, + telemetry: config.telemetry, + }); + } + + async setup() { + const client = getWorkspaceClient(); + if (!client) { + throw new Error("Jobs plugin requires a configured workspace client"); + } + logger.info("Jobs plugin initialized"); + } + + /** + * Submits a one-time run without creating a job. + * + * @see https://docs.databricks.com/api/workspace/jobs/submit + */ + async submitRun( + request: jobsTypes.SubmitRun, + signal?: AbortSignal, + ): Promise { + return this.execute( + (sig) => + this.connector.submitRun(getWorkspaceClient(), request, sig ?? signal), + { timeout: this.config.timeout ?? 60000 }, + ) as Promise; + } + + /** + * Triggers a run of an existing job. 
+ * + * @see https://docs.databricks.com/api/workspace/jobs/run-now + */ + async runNow( + request: jobsTypes.RunNow, + signal?: AbortSignal, + ): Promise { + return this.execute( + (sig) => + this.connector.runNow(getWorkspaceClient(), request, sig ?? signal), + { timeout: this.config.timeout ?? 60000 }, + ) as Promise; + } + + /** + * Retrieves metadata of a run. + * + * @see https://docs.databricks.com/api/workspace/jobs/get-run + */ + async getRun(runId: number, signal?: AbortSignal): Promise { + return this.execute( + (sig) => + this.connector.getRun( + getWorkspaceClient(), + { run_id: runId }, + sig ?? signal, + ), + { timeout: this.config.timeout ?? 60000 }, + ) as Promise; + } + + /** + * Retrieves output of a single task run. + * + * @see https://docs.databricks.com/api/workspace/jobs/get-run-output + */ + async getRunOutput( + runId: number, + signal?: AbortSignal, + ): Promise { + return this.execute( + (sig) => + this.connector.getRunOutput( + getWorkspaceClient(), + { run_id: runId }, + sig ?? signal, + ), + { timeout: this.config.timeout ?? 60000 }, + ) as Promise; + } + + /** + * Cancels a job run. + * + * @see https://docs.databricks.com/api/workspace/jobs/cancel-run + */ + async cancelRun(runId: number, signal?: AbortSignal): Promise { + await this.execute( + (sig) => + this.connector.cancelRun( + getWorkspaceClient(), + { run_id: runId }, + sig ?? signal, + ), + { timeout: this.config.timeout ?? 60000 }, + ); + } + + /** + * Lists runs for a job. + * + * @see https://docs.databricks.com/api/workspace/jobs/list-runs + */ + async listRuns( + request: jobsTypes.ListRunsRequest, + signal?: AbortSignal, + ): Promise { + return this.execute( + (sig) => + this.connector.listRuns(getWorkspaceClient(), request, sig ?? signal), + { timeout: this.config.timeout ?? 60000 }, + ) as Promise; + } + + /** + * Retrieves details for a single job. 
+ * + * @see https://docs.databricks.com/api/workspace/jobs/get + */ + async getJob(jobId: number, signal?: AbortSignal): Promise { + return this.execute( + (sig) => + this.connector.getJob( + getWorkspaceClient(), + { job_id: jobId }, + sig ?? signal, + ), + { timeout: this.config.timeout ?? 60000 }, + ) as Promise; + } + + /** + * Creates a new job. + * + * @see https://docs.databricks.com/api/workspace/jobs/create + */ + async createJob( + request: jobsTypes.CreateJob, + signal?: AbortSignal, + ): Promise { + return this.execute( + (sig) => + this.connector.createJob(getWorkspaceClient(), request, sig ?? signal), + { timeout: this.config.timeout ?? 60000 }, + ) as Promise; + } + + /** + * Polls a run until it reaches a terminal state (TERMINATED, SKIPPED, or INTERNAL_ERROR). + * + * @param runId - The run ID to wait for + * @param timeoutMs - Maximum time to wait (defaults to config.timeout or 600000ms) + * @returns The final run state + */ + async waitForRun( + runId: number, + timeoutMs?: number, + signal?: AbortSignal, + ): Promise { + return this.connector.waitForRun( + getWorkspaceClient(), + runId, + this.config.pollIntervalMs ?? 5000, + timeoutMs ?? this.config.timeout ?? 600000, + signal, + ); + } + + /** + * Returns the plugin's public API, accessible via `AppKit.jobs`. 
+ * + * - `submitRun` — Submit a one-time run + * - `runNow` — Trigger an existing job + * - `getRun` — Get run metadata + * - `getRunOutput` — Get task output + * - `cancelRun` — Cancel a run + * - `listRuns` — List runs for a job + * - `getJob` — Get job details + * - `createJob` — Create a new job + * - `waitForRun` — Poll until terminal state + */ + exports() { + return { + submitRun: this.submitRun.bind(this), + runNow: this.runNow.bind(this), + getRun: this.getRun.bind(this), + getRunOutput: this.getRunOutput.bind(this), + cancelRun: this.cancelRun.bind(this), + listRuns: this.listRuns.bind(this), + getJob: this.getJob.bind(this), + createJob: this.createJob.bind(this), + waitForRun: this.waitForRun.bind(this), + }; + } +} + +/** + * @internal + */ +export const jobs = toPlugin(JobsPlugin); diff --git a/packages/appkit/src/plugins/jobs/tests/plugin.test.ts b/packages/appkit/src/plugins/jobs/tests/plugin.test.ts new file mode 100644 index 00000000..da0cb9a2 --- /dev/null +++ b/packages/appkit/src/plugins/jobs/tests/plugin.test.ts @@ -0,0 +1,119 @@ +import { createMockRouter, setupDatabricksEnv } from "@tools/test-helpers"; +import { afterEach, beforeEach, describe, expect, test, vi } from "vitest"; +import { JobsPlugin, jobs } from "../plugin"; + +vi.mock("@databricks/sdk-experimental", () => ({ + WorkspaceClient: vi.fn(), +})); + +vi.mock("../../../context", () => ({ + getWorkspaceClient: vi.fn(() => ({})), +})); + +vi.mock("../../../connectors/jobs", () => ({ + JobsConnector: vi.fn(() => ({ + submitRun: vi.fn(), + runNow: vi.fn(), + getRun: vi.fn(), + getRunOutput: vi.fn(), + cancelRun: vi.fn(), + listRuns: vi.fn(), + getJob: vi.fn(), + createJob: vi.fn(), + waitForRun: vi.fn(), + })), +})); + +vi.mock("../../../logging/logger", () => ({ + createLogger: vi.fn(() => ({ + info: vi.fn(), + warn: vi.fn(), + error: vi.fn(), + debug: vi.fn(), + })), +})); + +vi.mock("../../../telemetry", async (importOriginal) => { + const actual = (await importOriginal()) as 
Record; + return { + ...actual, + TelemetryManager: { + getProvider: vi.fn(() => ({ + getTracer: vi.fn().mockReturnValue({ + startActiveSpan: vi.fn((...args: any[]) => { + const fn = args[args.length - 1]; + return typeof fn === "function" + ? fn({ end: vi.fn(), setAttribute: vi.fn(), setStatus: vi.fn() }) + : undefined; + }), + }), + getMeter: vi.fn().mockReturnValue({ + createCounter: vi.fn().mockReturnValue({ add: vi.fn() }), + createHistogram: vi.fn().mockReturnValue({ record: vi.fn() }), + }), + getLogger: vi.fn().mockReturnValue({ emit: vi.fn() }), + emit: vi.fn(), + startActiveSpan: vi.fn(async (_n: any, _o: any, fn: any) => + fn({ end: vi.fn() }), + ), + registerInstrumentations: vi.fn(), + })), + }, + normalizeTelemetryOptions: vi.fn(() => ({ + traces: false, + metrics: false, + logs: false, + })), + }; +}); + +describe("JobsPlugin", () => { + beforeEach(() => setupDatabricksEnv()); + afterEach(() => vi.restoreAllMocks()); + + test("creates with correct name from manifest", () => { + expect(new JobsPlugin({}).name).toBe("jobs"); + }); + + test("toPlugin factory produces correct PluginData", () => { + const data = jobs({}); + expect(data.name).toBe("jobs"); + expect(data.plugin).toBe(JobsPlugin); + }); + + test("toPlugin works with no config", () => { + expect(jobs().name).toBe("jobs"); + }); + + test("manifest has no required resources", () => { + expect(JobsPlugin.manifest.resources.required).toEqual([]); + }); + + test("does not register health endpoint (no routes)", () => { + const plugin = new JobsPlugin({}); + const { router, getHandler } = createMockRouter(); + plugin.injectRoutes(router); + expect(getHandler("GET", "/health")).toBeUndefined(); + }); + + test("exports returns all 9 Jobs API methods", () => { + const api = new JobsPlugin({}).exports(); + expect(typeof api.submitRun).toBe("function"); + expect(typeof api.runNow).toBe("function"); + expect(typeof api.getRun).toBe("function"); + expect(typeof api.getRunOutput).toBe("function"); + 
expect(typeof api.cancelRun).toBe("function"); + expect(typeof api.listRuns).toBe("function"); + expect(typeof api.getJob).toBe("function"); + expect(typeof api.createJob).toBe("function"); + expect(typeof api.waitForRun).toBe("function"); + }); + + test("exports does not include internal methods", () => { + const api = new JobsPlugin({}).exports(); + expect((api as any).setup).toBeUndefined(); + expect((api as any).shutdown).toBeUndefined(); + expect((api as any).injectRoutes).toBeUndefined(); + expect((api as any).connector).toBeUndefined(); + }); +}); diff --git a/packages/appkit/src/plugins/jobs/tests/scenario/README.md b/packages/appkit/src/plugins/jobs/tests/scenario/README.md new file mode 100644 index 00000000..26ea27b9 --- /dev/null +++ b/packages/appkit/src/plugins/jobs/tests/scenario/README.md @@ -0,0 +1,46 @@ +# Jobs Plugin Scenario Test: Mock Jobs API + +End-to-end scenario test for the jobs plugin using a mock Jobs API server. + +## What it tests + +- Job submission and run ID generation +- Run lifecycle transitions (PENDING -> RUNNING -> TERMINATED) +- Run cancellation +- Multi-task job tracking +- 404 handling for non-existent runs +- Dashboard UI rendering + +## Run locally + +```bash +# Start the mock server +npx tsx app/server.ts + +# Run public test cases +TASK_CASES_PATH=public/cases.json npx playwright test tests/jobs.spec.ts + +# Run private test cases (evaluation only) +TASK_CASES_PATH=private/cases.json npx playwright test tests/jobs.spec.ts +``` + +## Run against a deployment + +```bash +APP_URL=https://your-app.databricksapps.com npx playwright test tests/jobs.spec.ts +``` + +## Structure + +``` +scenario/ + meta.json # Task config (command, URL, timeout) + app/ + server.ts # Mock Jobs API server + public/ + cases.json # 4 basic scenarios (developer verification) + private/ + cases.json # 7 comprehensive scenarios (evaluation) + tests/ + jobs.spec.ts # Playwright tests parameterized by cases +``` diff --git 
a/packages/appkit/src/plugins/jobs/tests/scenario/app/server.ts b/packages/appkit/src/plugins/jobs/tests/scenario/app/server.ts new file mode 100644 index 00000000..4cc99d5e --- /dev/null +++ b/packages/appkit/src/plugins/jobs/tests/scenario/app/server.ts @@ -0,0 +1,168 @@ +/** + * Sample AppKit app demonstrating the jobs plugin. + * + * A Mock Jobs API server that simulates the Databricks Jobs lifecycle: + * job submission, status polling, cancellation, and output retrieval. + * Used for scenario testing without real Databricks API calls. + */ + +import express from "express"; + +interface MockRun { + run_id: number; + run_name: string; + state: { + life_cycle_state: string; + result_state?: string; + state_message: string; + }; + tasks: Array<{ + task_key: string; + state: { life_cycle_state: string; result_state?: string }; + }>; + start_time: number; + end_time?: number; +} + +const runs = new Map(); +let nextRunId = 1000; + +const app = express(); +app.use(express.json()); + +// POST /api/jobs/submit — simulate submitRun +app.post("/api/jobs/submit", (req, res) => { + const { run_name, tasks } = req.body; + const runId = nextRunId++; + const run: MockRun = { + run_id: runId, + run_name: run_name ?? `run-${runId}`, + state: { + life_cycle_state: "PENDING", + state_message: "Run is pending", + }, + tasks: (tasks ?? 
[]).map((t: any) => ({ + task_key: t.task_key, + state: { life_cycle_state: "PENDING" }, + })), + start_time: Date.now(), + }; + runs.set(runId, run); + + // Auto-transition to RUNNING after creation + setTimeout(() => { + const r = runs.get(runId); + if (r && r.state.life_cycle_state === "PENDING") { + r.state.life_cycle_state = "RUNNING"; + r.state.state_message = "Run is executing"; + for (const t of r.tasks) { + t.state.life_cycle_state = "RUNNING"; + } + } + }, 100); + + // Auto-transition to TERMINATED/SUCCESS after 500ms + setTimeout(() => { + const r = runs.get(runId); + if (r && r.state.life_cycle_state === "RUNNING") { + r.state.life_cycle_state = "TERMINATED"; + r.state.result_state = "SUCCESS"; + r.state.state_message = "Run completed successfully"; + r.end_time = Date.now(); + for (const t of r.tasks) { + t.state.life_cycle_state = "TERMINATED"; + t.state.result_state = "SUCCESS"; + } + } + }, 500); + + res.json({ run_id: runId }); +}); + +// GET /api/jobs/runs/:runId — simulate getRun +app.get("/api/jobs/runs/:runId", (req, res) => { + const runId = parseInt(req.params.runId); + const run = runs.get(runId); + if (!run) { + return res.status(404).json({ error: `Run ${runId} not found` }); + } + res.json(run); +}); + +// GET /api/jobs/runs — list all runs +app.get("/api/jobs/runs", (_req, res) => { + res.json({ runs: Array.from(runs.values()) }); +}); + +// POST /api/jobs/runs/:runId/cancel — cancel a run +app.post("/api/jobs/runs/:runId/cancel", (req, res) => { + const runId = parseInt(req.params.runId); + const run = runs.get(runId); + if (!run) { + return res.status(404).json({ error: `Run ${runId} not found` }); + } + run.state.life_cycle_state = "TERMINATED"; + run.state.result_state = "CANCELED"; + run.state.state_message = "Run was canceled"; + run.end_time = Date.now(); + res.json({}); +}); + +// GET /api/health +app.get("/api/health", (_req, res) => { + res.json({ status: "ok", plugin: "jobs" }); +}); + +// HTML UI — simple dashboard showing 
runs +app.get("/", (_req, res) => { + const allRuns = Array.from(runs.values()); + const rows = allRuns + .map( + (r) => + ` + ${r.run_id} + ${r.run_name} + ${r.state.life_cycle_state} + ${r.state.result_state ?? "-"} + ${r.tasks.length} + `, + ) + .join(""); + + res.send(` + +Jobs Dashboard + +

+  <h1>Jobs Dashboard</h1>

+

+  <p>Total runs: ${allRuns.length}</p>

+  <table>
+    <tr>
+      <th>Run ID</th><th>Name</th><th>State</th><th>Result</th><th>Tasks</th>
+    </tr>
+    ${rows}
+  </table>
+ + +`); +}); + +const port = parseInt(process.env.PORT ?? "3001"); +app.listen(port, () => { + console.log("Jobs scenario app running on http://localhost:" + port); +}); diff --git a/packages/appkit/src/plugins/jobs/tests/scenario/meta.json b/packages/appkit/src/plugins/jobs/tests/scenario/meta.json new file mode 100644 index 00000000..fef4c958 --- /dev/null +++ b/packages/appkit/src/plugins/jobs/tests/scenario/meta.json @@ -0,0 +1,7 @@ +{ + "appCommand": "npx tsx app/server.ts", + "appUrl": "http://localhost:3001", + "timeoutMs": 30000, + "casesFile": "{variant}/cases.json", + "resources": [] +} diff --git a/packages/appkit/src/plugins/jobs/tests/scenario/private/cases.json b/packages/appkit/src/plugins/jobs/tests/scenario/private/cases.json new file mode 100644 index 00000000..f9d6947e --- /dev/null +++ b/packages/appkit/src/plugins/jobs/tests/scenario/private/cases.json @@ -0,0 +1,84 @@ +{ + "cases": [ + { + "description": "Get run returns lifecycle state", + "action": "api", + "setup": { + "method": "POST", + "endpoint": "/api/jobs/submit", + "body": { "run_name": "private-test", "tasks": [{ "task_key": "etl" }] } + }, + "endpoint": "/api/jobs/runs/1000", + "expectedFields": ["run_id", "run_name", "state"] + }, + { + "description": "Cancel run transitions to CANCELED", + "action": "api", + "setup": { + "method": "POST", + "endpoint": "/api/jobs/submit", + "body": { "run_name": "cancel-test", "tasks": [{ "task_key": "main" }] } + }, + "method": "POST", + "endpoint": "/api/jobs/runs/1001/cancel", + "expectedStatus": 200 + }, + { + "description": "Get non-existent run returns 404", + "action": "api", + "endpoint": "/api/jobs/runs/99999", + "expectedStatus": 404 + }, + { + "description": "Run completes with SUCCESS after polling", + "action": "api", + "setup": { + "method": "POST", + "endpoint": "/api/jobs/submit", + "body": { + "run_name": "poll-test", + "tasks": [{ "task_key": "analysis" }] + }, + "waitMs": 700 + }, + "endpoint": "/api/jobs/runs/1002", + 
"expectedBody": { + "state": { + "life_cycle_state": "TERMINATED", + "result_state": "SUCCESS", + "state_message": "Run completed successfully" + } + } + }, + { + "description": "Multi-task run tracks all tasks", + "action": "api", + "setup": { + "method": "POST", + "endpoint": "/api/jobs/submit", + "body": { + "run_name": "multi-task", + "tasks": [ + { "task_key": "extract" }, + { "task_key": "transform" }, + { "task_key": "load" } + ] + } + }, + "endpoint": "/api/jobs/runs/1003", + "expectedFields": ["tasks"] + }, + { + "description": "Dashboard shows run count", + "action": "load", + "url": "/", + "expectedTextContains": ["Total runs"] + }, + { + "description": "Dashboard table has correct columns", + "action": "load", + "url": "/", + "expectedTextContains": ["Run ID", "Name", "State", "Result", "Tasks"] + } + ] +} diff --git a/packages/appkit/src/plugins/jobs/tests/scenario/public/cases.json b/packages/appkit/src/plugins/jobs/tests/scenario/public/cases.json new file mode 100644 index 00000000..b1149ff2 --- /dev/null +++ b/packages/appkit/src/plugins/jobs/tests/scenario/public/cases.json @@ -0,0 +1,33 @@ +{ + "cases": [ + { + "description": "Health check returns ok", + "action": "api", + "endpoint": "/api/health", + "expectedBody": { "status": "ok", "plugin": "jobs" } + }, + { + "description": "Submit a run returns run_id", + "action": "api", + "method": "POST", + "endpoint": "/api/jobs/submit", + "body": { + "run_name": "public-test-run", + "tasks": [{ "task_key": "main" }] + }, + "expectedFields": ["run_id"] + }, + { + "description": "List runs shows submitted run", + "action": "api", + "endpoint": "/api/jobs/runs", + "expectedFields": ["runs"] + }, + { + "description": "Dashboard loads", + "action": "load", + "url": "/", + "expectedTextContains": ["Jobs Dashboard"] + } + ] +} diff --git a/packages/appkit/src/plugins/jobs/tests/scenario/tests/jobs.spec.ts b/packages/appkit/src/plugins/jobs/tests/scenario/tests/jobs.spec.ts new file mode 100644 index 
00000000..cab664db --- /dev/null +++ b/packages/appkit/src/plugins/jobs/tests/scenario/tests/jobs.spec.ts @@ -0,0 +1,116 @@ +import * as fs from "node:fs"; +import * as path from "node:path"; +import { expect, test } from "@playwright/test"; + +interface SetupStep { + method: string; + endpoint: string; + body: unknown; + waitMs?: number; +} + +interface TaskCase { + description: string; + action: "load" | "api"; + method?: string; + url?: string; + endpoint?: string; + body?: unknown; + setup?: SetupStep; + expectedStatus?: number; + expectedBody?: Record; + expectedFields?: string[]; + expectedTextContains?: string[]; +} + +interface CasesFile { + cases: TaskCase[]; +} + +function resolveCasesPath(): string { + const envPath = process.env.TASK_CASES_PATH; + if (envPath) + return path.isAbsolute(envPath) + ? envPath + : path.resolve(process.cwd(), envPath); + return path.join(__dirname, "..", "public", "cases.json"); +} + +const casesFile: CasesFile = JSON.parse( + fs.readFileSync(resolveCasesPath(), "utf8"), +); +const cases = casesFile.cases || []; +const appUrl = process.env.APP_URL || "http://localhost:3001"; + +test.describe("jobs plugin scenario", () => { + for (const c of cases) { + test(`${c.action}: ${c.description}`, async ({ page, request }) => { + // Run setup step if present + if (c.setup) { + const setupResp = await request.fetch(`${appUrl}${c.setup.endpoint}`, { + method: c.setup.method, + data: c.setup.body, + headers: { "Content-Type": "application/json" }, + }); + expect(setupResp.ok()).toBeTruthy(); + if (c.setup.waitMs) { + await page.waitForTimeout(c.setup.waitMs); + } + } + + switch (c.action) { + case "load": { + const targetUrl = c.url ?? 
"/"; + await page.goto(`${appUrl}${targetUrl}`); + await page.waitForLoadState("networkidle"); + + if (c.expectedTextContains) { + for (const text of c.expectedTextContains) { + await expect(page.locator("body")).toContainText(text); + } + } + break; + } + + case "api": { + const fetchOptions: Record = { + method: c.method ?? "GET", + }; + if (c.body) { + fetchOptions.data = c.body; + fetchOptions.headers = { "Content-Type": "application/json" }; + } + + const response = await request.fetch( + `${appUrl}${c.endpoint}`, + fetchOptions, + ); + + if (c.expectedStatus) { + expect(response.status()).toBe(c.expectedStatus); + if (c.expectedStatus >= 400) return; + } + + if (!c.expectedStatus) { + expect(response.ok()).toBeTruthy(); + } + + if (c.expectedBody) { + const body = await response.json(); + for (const [key, value] of Object.entries(c.expectedBody)) { + expect(body[key]).toEqual(value); + } + } + + if (c.expectedFields) { + const body = await response.json(); + for (const field of c.expectedFields) { + expect(body).toHaveProperty(field); + } + } + break; + } + } + }); + } +}); diff --git a/packages/appkit/src/plugins/jobs/types.ts b/packages/appkit/src/plugins/jobs/types.ts new file mode 100644 index 00000000..aa6afd8c --- /dev/null +++ b/packages/appkit/src/plugins/jobs/types.ts @@ -0,0 +1,14 @@ +import type { BasePluginConfig } from "shared"; + +/** + * Configuration for the Jobs plugin. + * + * All fields are optional — the plugin uses the workspace client + * from AppKit's context (authenticated via DATABRICKS_HOST + token). + */ +export interface IJobsConfig extends BasePluginConfig { + /** Default timeout for Jobs API calls in milliseconds. Defaults to 60000. */ + timeout?: number; + /** Poll interval when waiting for run completion. Defaults to 5000ms. 
*/ + pollIntervalMs?: number; +} diff --git a/packages/appkit/src/plugins/proto/index.ts b/packages/appkit/src/plugins/proto/index.ts new file mode 100644 index 00000000..7941ca33 --- /dev/null +++ b/packages/appkit/src/plugins/proto/index.ts @@ -0,0 +1,2 @@ +export * from "./plugin"; +export * from "./types"; diff --git a/packages/appkit/src/plugins/proto/manifest.json b/packages/appkit/src/plugins/proto/manifest.json new file mode 100644 index 00000000..721c3f1f --- /dev/null +++ b/packages/appkit/src/plugins/proto/manifest.json @@ -0,0 +1,16 @@ +{ + "$schema": "https://databricks.github.io/appkit/schemas/plugin-manifest.schema.json", + "name": "proto", + "displayName": "Proto Plugin", + "description": "Typed data contracts via protobuf — shared schemas across plugins, routes, and jobs", + "resources": { + "required": [], + "optional": [] + }, + "config": { + "schema": { + "type": "object", + "properties": {} + } + } +} diff --git a/packages/appkit/src/plugins/proto/plugin.ts b/packages/appkit/src/plugins/proto/plugin.ts new file mode 100644 index 00000000..724f2e50 --- /dev/null +++ b/packages/appkit/src/plugins/proto/plugin.ts @@ -0,0 +1,82 @@ +import type { DescMessage, JsonValue, MessageShape } from "@bufbuild/protobuf"; +import { create } from "@bufbuild/protobuf"; +import type express from "express"; +import type { IAppRouter } from "shared"; +import { Plugin, toPlugin } from "../../plugin"; +import type { PluginManifest } from "../../registry"; +import manifest from "./manifest.json"; +import { ProtoSerializer } from "./serializer"; +import type { IProtoConfig } from "./types"; + +/** + * Proto plugin for AppKit. + * + * Typed data contracts for AppKit applications. + * + * Provides protobuf-based serialization so plugins, routes, and + * jobs share a single schema definition. 
+ */ +export class ProtoPlugin extends Plugin { + static manifest = manifest as PluginManifest<"proto">; + protected declare config: IProtoConfig; + private serializer: ProtoSerializer; + + constructor(config: IProtoConfig) { + super(config); + this.config = config; + this.serializer = new ProtoSerializer(); + } + + /** Create a new proto message with optional initial values. */ + create(schema: T, init?: Partial>): MessageShape { + return create(schema, init as MessageShape); + } + + /** Serialize a protobuf message to binary. */ + serialize(schema: T, message: MessageShape): Uint8Array { + return this.serializer.serialize(schema, message); + } + + /** Deserialize a protobuf message from binary. */ + deserialize(schema: T, data: Uint8Array): MessageShape { + return this.serializer.deserialize(schema, data); + } + + /** Convert a protobuf message to JSON (snake_case field names). */ + toJSON(schema: T, message: MessageShape): JsonValue { + return this.serializer.toJSON(schema, message); + } + + /** Parse a protobuf message from JSON. 
*/ + fromJSON(schema: T, json: JsonValue): MessageShape { + return this.serializer.fromJSON(schema, json); + } + + injectRoutes(router: IAppRouter): void { + this.route(router, { + name: "health", + method: "get", + path: "/health", + handler: async (_req: express.Request, res: express.Response) => { + res.json({ status: "ok" }); + }, + }); + } + + async shutdown(): Promise { + this.streamManager.abortAll(); + } + + exports() { + return { + create: this.create.bind(this), + serialize: this.serialize.bind(this), + deserialize: this.deserialize.bind(this), + toJSON: this.toJSON.bind(this), + fromJSON: this.fromJSON.bind(this), + }; + } +} + +/** @internal */ +export const proto = toPlugin(ProtoPlugin); diff --git a/packages/appkit/src/plugins/proto/serializer.ts b/packages/appkit/src/plugins/proto/serializer.ts new file mode 100644 index 00000000..01bbf31b --- /dev/null +++ b/packages/appkit/src/plugins/proto/serializer.ts @@ -0,0 +1,49 @@ +import { + type DescMessage, + type MessageShape, + fromBinary, + fromJson, + toBinary, + toJson, +} from "@bufbuild/protobuf"; +import type { JsonValue } from "@bufbuild/protobuf"; + +/** + * Protobuf serializer for typed data contracts. + * + * Handles binary and JSON serialization/deserialization of proto messages. + * For file I/O (UC Volumes), use the Files plugin. + */ +export class ProtoSerializer { + /** Serialize a protobuf message to binary. */ + serialize( + schema: T, + message: MessageShape, + ): Uint8Array { + return toBinary(schema, message); + } + + /** Deserialize a protobuf message from binary. */ + deserialize( + schema: T, + data: Uint8Array, + ): MessageShape { + return fromBinary(schema, data); + } + + /** Convert a protobuf message to JSON (uses proto field names — snake_case). */ + toJSON( + schema: T, + message: MessageShape, + ): JsonValue { + return toJson(schema, message); + } + + /** Parse a protobuf message from JSON. 
*/ + fromJSON( + schema: T, + json: JsonValue, + ): MessageShape { + return fromJson(schema, json); + } +} diff --git a/packages/appkit/src/plugins/proto/tests/plugin.test.ts b/packages/appkit/src/plugins/proto/tests/plugin.test.ts new file mode 100644 index 00000000..34710530 --- /dev/null +++ b/packages/appkit/src/plugins/proto/tests/plugin.test.ts @@ -0,0 +1,100 @@ +import { + createMockRouter, + createMockRequest, + createMockResponse, + setupDatabricksEnv, +} from "@tools/test-helpers"; +import { afterEach, beforeEach, describe, expect, test, vi } from "vitest"; +import { ProtoPlugin, proto } from "../plugin"; + +vi.mock("../../../cache", () => ({ + CacheManager: { + getInstanceSync: vi.fn(() => ({ + get: vi.fn(), + set: vi.fn(), + delete: vi.fn(), + getOrExecute: vi.fn(async (_k: any, fn: any) => fn()), + generateKey: vi.fn((p: any, u: any) => `${u}:${JSON.stringify(p)}`), + })), + }, +})); + +vi.mock("../../../telemetry", async (importOriginal) => { + const actual = (await importOriginal()) as Record; + return { + ...actual, + TelemetryManager: { + getProvider: vi.fn(() => ({ + getTracer: vi.fn().mockReturnValue({ + startActiveSpan: vi.fn((...args: any[]) => { + const fn = args[args.length - 1]; + return typeof fn === "function" ? 
fn({ end: vi.fn(), setAttribute: vi.fn(), setStatus: vi.fn() }) : undefined; + }), + }), + getMeter: vi.fn().mockReturnValue({ + createCounter: vi.fn().mockReturnValue({ add: vi.fn() }), + createHistogram: vi.fn().mockReturnValue({ record: vi.fn() }), + }), + getLogger: vi.fn().mockReturnValue({ emit: vi.fn() }), + emit: vi.fn(), + startActiveSpan: vi.fn(async (_n: any, _o: any, fn: any) => fn({ end: vi.fn() })), + registerInstrumentations: vi.fn(), + })), + }, + normalizeTelemetryOptions: vi.fn(() => ({ traces: false, metrics: false, logs: false })), + }; +}); + +describe("ProtoPlugin", () => { + beforeEach(() => setupDatabricksEnv()); + afterEach(() => vi.restoreAllMocks()); + + test("creates with correct name from manifest", () => { + expect(new ProtoPlugin({}).name).toBe("proto"); + }); + + test("toPlugin factory produces correct PluginData", () => { + const data = proto({}); + expect(data.name).toBe("proto"); + expect(data.plugin).toBe(ProtoPlugin); + }); + + test("toPlugin works with no config", () => { + expect(proto().name).toBe("proto"); + }); + + test("manifest has no required resources", () => { + expect(ProtoPlugin.manifest.resources.required).toEqual([]); + }); + + test("injectRoutes registers health endpoint", () => { + const plugin = new ProtoPlugin({}); + const { router, getHandler } = createMockRouter(); + plugin.injectRoutes(router); + expect(getHandler("GET", "/health")).toBeDefined(); + }); + + test("health endpoint returns ok", async () => { + const plugin = new ProtoPlugin({}); + const { router, getHandler } = createMockRouter(); + plugin.injectRoutes(router); + + const res = createMockResponse(); + await getHandler("GET", "/health")(createMockRequest(), res); + + expect(res.json).toHaveBeenCalledWith({ status: "ok" }); + }); + + test("exports returns serialization API only", () => { + const api = new ProtoPlugin({}).exports(); + expect(typeof api.create).toBe("function"); + expect(typeof api.serialize).toBe("function"); + expect(typeof 
api.deserialize).toBe("function"); + expect(typeof api.toJSON).toBe("function"); + expect(typeof api.fromJSON).toBe("function"); + // No file I/O — that belongs in the Files plugin + expect((api as any).writeToVolume).toBeUndefined(); + expect((api as any).readFromVolume).toBeUndefined(); + expect((api as any).exists).toBeUndefined(); + }); +}); diff --git a/packages/appkit/src/plugins/proto/tests/scenario/README.md b/packages/appkit/src/plugins/proto/tests/scenario/README.md new file mode 100644 index 00000000..ecf241f6 --- /dev/null +++ b/packages/appkit/src/plugins/proto/tests/scenario/README.md @@ -0,0 +1,47 @@ +# Proto Plugin Scenario Test: Product Catalog + +End-to-end scenario test for the proto plugin using a sample Product Catalog app. + +## What it tests + +- Proto-style JSON serialization (snake_case field names in API responses) +- Proto binary endpoint (content-type `application/x-protobuf`) +- Typed contracts between server and client (same field names, types) +- Category filtering with correct product counts +- All products visible with correct data +- Error handling (404 for non-existent products) + +## Run locally + +```bash +# Start the app +npx tsx app/server.ts + +# Run public test cases +TASK_CASES_PATH=public/cases.json npx playwright test tests/catalog.spec.ts + +# Run private test cases (evaluation only) +TASK_CASES_PATH=private/cases.json npx playwright test tests/catalog.spec.ts +``` + +## Run against a deployment + +```bash +APP_URL=https://your-app.databricksapps.com npx playwright test tests/catalog.spec.ts +``` + +## Structure + +``` +scenario/ + meta.json # Task config (command, URL, timeout) + app/ + server.ts # Sample AppKit app with proto-style contracts + catalog.proto # Proto definition (for reference / codegen) + public/ + cases.json # 5 basic scenarios (developer verification) + private/ + cases.json # 8 comprehensive scenarios (evaluation) + tests/ + catalog.spec.ts # Playwright tests parameterized by cases +``` diff --git 
a/packages/appkit/src/plugins/proto/tests/scenario/app/catalog.proto b/packages/appkit/src/plugins/proto/tests/scenario/app/catalog.proto new file mode 100644 index 00000000..4654a680 --- /dev/null +++ b/packages/appkit/src/plugins/proto/tests/scenario/app/catalog.proto @@ -0,0 +1,17 @@ +syntax = "proto3"; + +package catalog.v1; + +message Product { + string id = 1; + string name = 2; + string category = 3; + double price = 4; + int32 stock = 5; + bool in_stock = 6; +} + +message ProductList { + repeated Product products = 1; + int32 total = 2; +} diff --git a/packages/appkit/src/plugins/proto/tests/scenario/app/server.ts b/packages/appkit/src/plugins/proto/tests/scenario/app/server.ts new file mode 100644 index 00000000..1b5ccd9d --- /dev/null +++ b/packages/appkit/src/plugins/proto/tests/scenario/app/server.ts @@ -0,0 +1,140 @@ +/** + * Sample AppKit app demonstrating the proto plugin. + * + * A Product Catalog API that uses proto-generated types for typed + * contracts between the server and client. Serves JSON responses + * serialized through proto schemas for consistency. + */ + +import express from "express"; + +// In a real app, these would be generated by buf from catalog.proto. +// For this scenario test, we use inline types matching the proto schema. 
+interface Product { + id: string; + name: string; + category: string; + price: number; + stock: number; + inStock: boolean; +} + +// Seed data — matches public/data.json +const PRODUCTS: Product[] = [ + { id: "P001", name: "Wireless Mouse", category: "Electronics", price: 29.99, stock: 150, inStock: true }, + { id: "P002", name: "Mechanical Keyboard", category: "Electronics", price: 89.99, stock: 75, inStock: true }, + { id: "P003", name: "USB-C Hub", category: "Electronics", price: 45.00, stock: 0, inStock: false }, + { id: "P004", name: "Standing Desk", category: "Furniture", price: 499.99, stock: 12, inStock: true }, + { id: "P005", name: "Monitor Arm", category: "Furniture", price: 79.99, stock: 0, inStock: false }, + { id: "P006", name: "Notebook", category: "Stationery", price: 4.99, stock: 500, inStock: true }, +]; + +// Proto-like toJSON: converts camelCase fields to snake_case for API output +function productToJSON(p: Product): Record<string, unknown> { + return { + id: p.id, + name: p.name, + category: p.category, + price: p.price, + stock: p.stock, + in_stock: p.inStock, + }; +} + +const app = express(); +const port = Number(process.env.PORT || 3000); + +app.use(express.json()); + +// Health check +app.get("/api/health", (_req, res) => { + res.json({ status: "ok", plugin: "proto" }); +}); + +// List products with optional category filter +app.get("/api/products", (req, res) => { + const { category } = req.query; + let filtered = PRODUCTS; + if (category && category !== "all") { + filtered = PRODUCTS.filter((p) => p.category === category); + } + res.json({ + products: filtered.map(productToJSON), + total: filtered.length, + }); +}); + +// Get single product by ID +app.get("/api/products/:id", (req, res) => { + const product = PRODUCTS.find((p) => p.id === req.params.id); + if (!product) return res.status(404).json({ error: "Product not found" }); + res.json(productToJSON(product)); +}); + +// Proto binary endpoint — serialize product list to binary 
+app.get("/api/products.bin", (_req, res) => { + // In a real app: app.proto.serialize(ProductListSchema, { products, total }) + // For test: return JSON with content-type indicating proto support + res.setHeader("Content-Type", "application/x-protobuf"); + res.json({ + products: PRODUCTS.map(productToJSON), + total: PRODUCTS.length, + }); +}); + +// Serve static HTML for the UI +app.get("/", (_req, res) => { + res.send(` + +Product Catalog + +

Product Catalog

+ + + + +
Loading...
+ + + + + + + + +
IDNameCategoryPriceStockIn Stock
+ + + +`); +}); + +app.listen(port, () => { + console.log("Product Catalog running on http://localhost:" + port); +}); diff --git a/packages/appkit/src/plugins/proto/tests/scenario/meta.json b/packages/appkit/src/plugins/proto/tests/scenario/meta.json new file mode 100644 index 00000000..1fdbb508 --- /dev/null +++ b/packages/appkit/src/plugins/proto/tests/scenario/meta.json @@ -0,0 +1,7 @@ +{ + "appCommand": "npx tsx app/server.ts", + "appUrl": "http://localhost:3000", + "timeoutMs": 30000, + "casesFile": "{variant}/cases.json", + "resources": [] +} diff --git a/packages/appkit/src/plugins/proto/tests/scenario/private/cases.json b/packages/appkit/src/plugins/proto/tests/scenario/private/cases.json new file mode 100644 index 00000000..29b6cdce --- /dev/null +++ b/packages/appkit/src/plugins/proto/tests/scenario/private/cases.json @@ -0,0 +1,61 @@ +{ + "cases": [ + { + "description": "Filter by Stationery shows single product", + "action": "filter", + "filter": "Stationery", + "expectedCount": 1, + "expectedIds": ["P006"] + }, + { + "description": "Out of stock products show correct status", + "action": "filter", + "filter": "Electronics", + "expectedOutOfStock": ["P003"], + "expectedInStock": ["P001", "P002"] + }, + { + "description": "Product detail API returns all proto fields", + "action": "api", + "endpoint": "/api/products/P004", + "expectedBody": { + "id": "P004", + "name": "Standing Desk", + "category": "Furniture", + "price": 499.99, + "stock": 12, + "in_stock": true + } + }, + { + "description": "Non-existent product returns 404", + "action": "api", + "endpoint": "/api/products/P999", + "expectedStatus": 404 + }, + { + "description": "Proto binary endpoint sets correct content type", + "action": "api", + "endpoint": "/api/products.bin", + "expectedContentType": "application/x-protobuf" + }, + { + "description": "All categories filter returns full catalog", + "action": "filter", + "filter": "all", + "expectedCount": 6 + }, + { + "description": "UI status 
text updates after filter", + "action": "filter", + "filter": "Furniture", + "expectedStatusText": "Showing 2 products" + }, + { + "description": "Table has correct column headers", + "action": "load", + "filter": "all", + "expectedColumns": ["ID", "Name", "Category", "Price", "Stock", "In Stock"] + } + ] +} diff --git a/packages/appkit/src/plugins/proto/tests/scenario/public/cases.json b/packages/appkit/src/plugins/proto/tests/scenario/public/cases.json new file mode 100644 index 00000000..1fd2d36d --- /dev/null +++ b/packages/appkit/src/plugins/proto/tests/scenario/public/cases.json @@ -0,0 +1,37 @@ +{ + "cases": [ + { + "description": "View all products", + "action": "load", + "filter": "all", + "expectedCount": 6, + "expectedIds": ["P001", "P002", "P003", "P004", "P005", "P006"] + }, + { + "description": "Filter by Electronics", + "action": "filter", + "filter": "Electronics", + "expectedCount": 3, + "expectedIds": ["P001", "P002", "P003"] + }, + { + "description": "Filter by Furniture", + "action": "filter", + "filter": "Furniture", + "expectedCount": 2, + "expectedIds": ["P004", "P005"] + }, + { + "description": "Health check returns proto plugin status", + "action": "api", + "endpoint": "/api/health", + "expectedBody": { "status": "ok", "plugin": "proto" } + }, + { + "description": "API returns snake_case field names (proto convention)", + "action": "api", + "endpoint": "/api/products/P001", + "expectedFields": ["id", "name", "category", "price", "stock", "in_stock"] + } + ] +} diff --git a/packages/appkit/src/plugins/proto/tests/scenario/tests/catalog.spec.ts b/packages/appkit/src/plugins/proto/tests/scenario/tests/catalog.spec.ts new file mode 100644 index 00000000..7f0efbf6 --- /dev/null +++ b/packages/appkit/src/plugins/proto/tests/scenario/tests/catalog.spec.ts @@ -0,0 +1,138 @@ +import * as fs from "node:fs"; +import * as path from "node:path"; +import { expect, test } from "@playwright/test"; + +interface TaskCase { + description: string; + action: 
"load" | "filter" | "api"; + filter?: string; + endpoint?: string; + expectedCount?: number; + expectedIds?: string[]; + expectedFields?: string[]; + expectedBody?: Record<string, unknown>; + expectedStatus?: number; + expectedContentType?: string; + expectedStatusText?: string; + expectedColumns?: string[]; + expectedInStock?: string[]; + expectedOutOfStock?: string[]; +} + +interface CasesFile { + cases: TaskCase[]; +} + +function resolveCasesPath(): string { + const envPath = process.env.TASK_CASES_PATH; + if (envPath) + return path.isAbsolute(envPath) + ? envPath + : path.resolve(process.cwd(), envPath); + return path.join(__dirname, "..", "public", "cases.json"); +} + +const casesFile: CasesFile = JSON.parse( + fs.readFileSync(resolveCasesPath(), "utf8"), +); +const cases = casesFile.cases || []; +const appUrl = process.env.APP_URL || "http://localhost:3000"; + +test.describe("product catalog — proto plugin scenario", () => { + for (const c of cases) { + test(`${c.action}: ${c.description}`, async ({ page, request }) => { + switch (c.action) { + case "load": + case "filter": { + await page.goto(appUrl); + await page.waitForLoadState("networkidle"); + + if (c.action === "filter" && c.filter && c.filter !== "all") { + await page + .getByRole("combobox", { name: "Category" }) + .selectOption(c.filter); + await page.getByRole("button", { name: "Filter" }).click(); + await page.waitForLoadState("networkidle"); + } + + if (c.expectedCount !== undefined) { + await expect(page.getByRole("status")).toHaveText( + `Showing ${c.expectedCount} products`, + ); + } + + if (c.expectedStatusText) { + await expect(page.getByRole("status")).toHaveText( + c.expectedStatusText, + ); + } + + if (c.expectedIds) { + const table = page.getByRole("table", { name: "Products" }); + for (const id of c.expectedIds) { + await expect(table).toContainText(id); + } + } + + if (c.expectedColumns) { + const table = page.getByRole("table", { name: "Products" }); + for (const col of c.expectedColumns) { + await 
expect( + table.getByRole("columnheader", { name: col, exact: true }), + ).toBeVisible(); + } + } + + if (c.expectedInStock) { + const table = page.getByRole("table", { name: "Products" }); + for (const id of c.expectedInStock) { + const row = table.getByRole("row").filter({ hasText: id }); + await expect(row).toContainText("Yes"); + } + } + + if (c.expectedOutOfStock) { + const table = page.getByRole("table", { name: "Products" }); + for (const id of c.expectedOutOfStock) { + const row = table.getByRole("row").filter({ hasText: id }); + await expect(row).toContainText("No"); + } + } + break; + } + + case "api": { + const response = await request.get(`${appUrl}${c.endpoint}`); + + if (c.expectedStatus) { + expect(response.status()).toBe(c.expectedStatus); + return; + } + + expect(response.ok()).toBeTruthy(); + + if (c.expectedContentType) { + expect(response.headers()["content-type"]).toContain( + c.expectedContentType, + ); + } + + if (c.expectedBody) { + const body = await response.json(); + for (const [key, value] of Object.entries(c.expectedBody)) { + expect(body[key]).toEqual(value); + } + } + + if (c.expectedFields) { + const body = await response.json(); + for (const field of c.expectedFields) { + expect(body).toHaveProperty(field); + } + } + break; + } + } + }); + } +}); diff --git a/packages/appkit/src/plugins/proto/tests/serializer.test.ts b/packages/appkit/src/plugins/proto/tests/serializer.test.ts new file mode 100644 index 00000000..85896695 --- /dev/null +++ b/packages/appkit/src/plugins/proto/tests/serializer.test.ts @@ -0,0 +1,53 @@ +import { describe, expect, test, vi } from "vitest"; +import { ProtoSerializer } from "../serializer"; + +vi.mock("@bufbuild/protobuf", () => ({ + toBinary: vi.fn((_s: any, msg: any) => new TextEncoder().encode(JSON.stringify(msg))), + fromBinary: vi.fn((_s: any, data: Uint8Array) => JSON.parse(new TextDecoder().decode(data))), + toJson: vi.fn((_s: any, msg: any) => msg), + fromJson: vi.fn((_s: any, json: any) => json), 
+})); + +describe("ProtoSerializer", () => { + const schema = { typeName: "test.Message" } as any; + const message = { name: "test", value: 42 }; + + test("serialize produces Uint8Array", () => { + const s = new ProtoSerializer(); + const result = s.serialize(schema, message as any); + expect(result).toBeInstanceOf(Uint8Array); + expect(result.length).toBeGreaterThan(0); + }); + + test("round-trip preserves data", () => { + const s = new ProtoSerializer(); + const bytes = s.serialize(schema, message as any); + const recovered = s.deserialize(schema, bytes); + expect(recovered).toEqual(message); + }); + + test("toJSON returns value", () => { + const s = new ProtoSerializer(); + expect(s.toJSON(schema, message as any)).toEqual(message); + }); + + test("fromJSON returns value", () => { + const s = new ProtoSerializer(); + expect(s.fromJSON(schema, message as any)).toEqual(message); + }); + + test("handles nested objects", () => { + const s = new ProtoSerializer(); + const nested = { + metadata: { entries: { k1: "v1" } }, + rows: [{ fields: { score: { case: "numberValue", value: 95 } } }], + }; + const bytes = s.serialize(schema, nested as any); + expect(s.deserialize(schema, bytes)).toEqual(nested); + }); + + test("deserialize throws on invalid data", () => { + const s = new ProtoSerializer(); + expect(() => s.deserialize(schema, new Uint8Array([0xff, 0xfe]))).toThrow(); + }); +}); diff --git a/packages/appkit/src/plugins/proto/types.ts b/packages/appkit/src/plugins/proto/types.ts new file mode 100644 index 00000000..ce02beea --- /dev/null +++ b/packages/appkit/src/plugins/proto/types.ts @@ -0,0 +1,4 @@ +import type { BasePluginConfig } from "shared"; + +/** Configuration for the Proto plugin. 
*/ +export interface IProtoConfig extends BasePluginConfig {} diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml index a4d3ec7b..78641882 100644 --- a/pnpm-lock.yaml +++ b/pnpm-lock.yaml @@ -239,6 +239,9 @@ importers: packages/appkit: dependencies: + '@bufbuild/protobuf': + specifier: ^2.3.0 + version: 2.11.0 '@databricks/lakebase': specifier: workspace:* version: link:../lakebase @@ -1406,6 +1409,9 @@ packages: '@braintree/sanitize-url@7.1.1': resolution: {integrity: sha512-i1L7noDNxtFyL5DmZafWy1wRVhGehQmzZaz1HiN5e7iylJMSZR7ekOV7NsIqa5qBldlLrsKv4HbgFUVlQrz8Mw==} + '@bufbuild/protobuf@2.11.0': + resolution: {integrity: sha512-sBXGT13cpmPR5BMgHE6UEEfEaShh5Ror6rfN3yEK5si7QVrtZg8LEPQb0VVhiLRUslD2yLnXtnRzG035J/mZXQ==} + '@chevrotain/cst-dts-gen@11.0.3': resolution: {integrity: sha512-BvIKpRLeS/8UbfxXxgC33xOumsacaeCKAjAeLyOn7Pcp95HiRbrpl14S+9vaZLolnbssPIUuiUd8IvgkRyt6NQ==} @@ -12360,6 +12366,8 @@ snapshots: '@braintree/sanitize-url@7.1.1': {} + '@bufbuild/protobuf@2.11.0': {} + '@chevrotain/cst-dts-gen@11.0.3': dependencies: '@chevrotain/gast': 11.0.3