diff --git a/README.md b/README.md index f75904373..9827ef988 100644 --- a/README.md +++ b/README.md @@ -146,7 +146,7 @@ Models must conform to the following schema, as defined in `app/schemas.ts`. - `npm`: String - AI SDK Package name - `env`: String[] - Environment variable keys used for auth - `doc`: String - Link to the provider's documentation -- `api` _(optional)_: String - OpenAI-compatible API endpoint. Required only when using `@ai-sdk/openai-compatible` as the npm package +- `api` _(optional)_: String - OpenAI-compatible API endpoint. Required when using `@ai-sdk/openai-compatible` or `@databricks/ai-sdk-provider` as the npm package **Model Schema:** @@ -186,6 +186,7 @@ See existing providers in the `providers/` directory for reference: - `providers/anthropic/` - Anthropic Claude models - `providers/openai/` - OpenAI GPT models - `providers/google/` - Google Gemini models +- `providers/databricks/` - Databricks Foundation Model APIs on **AI Gateway**: default **`mlflow/v1`** for chat and embeddings via **`@databricks/ai-sdk-provider`**; per-model **`[provider]`** for **Claude** (Anthropic Messages), **Gemini** (native API), and **OpenAI Responses**. See [providers/databricks/README.md](providers/databricks/README.md) for discovery, authentication, and validation scripts. 
### Working on frontend diff --git a/bun.lock b/bun.lock index 57d0877f3..11eb4be11 100644 --- a/bun.lock +++ b/bun.lock @@ -13,6 +13,7 @@ "name": "models.dev", "version": "0.0.0", "dependencies": { + "@databricks/sdk-experimental": "^0.16.0", "zod": "catalog:", }, "devDependencies": { @@ -48,294 +49,340 @@ "zod": "3.24.2", }, "packages": { - "@cloudflare/workers-types": ["@cloudflare/workers-types@4.20250801.0", "", {}, "sha512-BQmMdoOGClY23TesgkR1PeGrPvPsSFD/zW7pDzWZHkOEsqkPk2A91h52bP8GbtKYTl1vdaYjQgJlGsP6Ih4G0w=="], + "@cloudflare/workers-types": ["@cloudflare/workers-types@4.20250801.0", "https://npm-proxy.dev.databricks.com/@cloudflare/workers-types/-/workers-types-4.20250801.0.tgz", {}, "sha512-BQmMdoOGClY23TesgkR1PeGrPvPsSFD/zW7pDzWZHkOEsqkPk2A91h52bP8GbtKYTl1vdaYjQgJlGsP6Ih4G0w=="], - "@modelcontextprotocol/sdk": ["@modelcontextprotocol/sdk@1.6.1", "", { "dependencies": { "content-type": "^1.0.5", "cors": "^2.8.5", "eventsource": "^3.0.2", "express": "^5.0.1", "express-rate-limit": "^7.5.0", "pkce-challenge": "^4.1.0", "raw-body": "^3.0.0", "zod": "^3.23.8", "zod-to-json-schema": "^3.24.1" } }, "sha512-oxzMzYCkZHMntzuyerehK3fV6A2Kwh5BD6CGEJSVDU2QNEhfLOptf2X7esQgaHZXHZY0oHmMsOtIDLP71UJXgA=="], + "@databricks/sdk-experimental": ["@databricks/sdk-experimental@0.16.0", "https://npm-proxy.dev.databricks.com/@databricks/sdk-experimental/-/sdk-experimental-0.16.0.tgz", { "dependencies": { "google-auth-library": "^10.5.0", "ini": "^6.0.0", "reflect-metadata": "^0.2.2", "semver": "^7.7.3" } }, "sha512-9c2RxWYoRDFupdt4ZnBc1IPE1XaXgN+/wyV4DVcEqOnIa31ep51OnwAD/3014BImfKdyXg32nmgrB9dwvB6+lg=="], + + "@modelcontextprotocol/sdk": ["@modelcontextprotocol/sdk@1.6.1", "https://npm-proxy.dev.databricks.com/@modelcontextprotocol/sdk/-/sdk-1.6.1.tgz", { "dependencies": { "content-type": "^1.0.5", "cors": "^2.8.5", "eventsource": "^3.0.2", "express": "^5.0.1", "express-rate-limit": "^7.5.0", "pkce-challenge": "^4.1.0", "raw-body": "^3.0.0", "zod": "^3.23.8", 
"zod-to-json-schema": "^3.24.1" } }, "sha512-oxzMzYCkZHMntzuyerehK3fV6A2Kwh5BD6CGEJSVDU2QNEhfLOptf2X7esQgaHZXHZY0oHmMsOtIDLP71UJXgA=="], "@models.dev/function": ["@models.dev/function@workspace:packages/function"], "@models.dev/web": ["@models.dev/web@workspace:packages/web"], - "@tsconfig/bun": ["@tsconfig/bun@1.0.8", "", {}, "sha512-JlJaRaS4hBTypxtFe8WhnwV8blf0R+3yehLk8XuyxUYNx6VXsKCjACSCvOYEFUiqlhlBWxtYCn/zRlOb8BzBQg=="], + "@tsconfig/bun": ["@tsconfig/bun@1.0.8", "https://npm-proxy.dev.databricks.com/@tsconfig/bun/-/bun-1.0.8.tgz", {}, "sha512-JlJaRaS4hBTypxtFe8WhnwV8blf0R+3yehLk8XuyxUYNx6VXsKCjACSCvOYEFUiqlhlBWxtYCn/zRlOb8BzBQg=="], + + "@types/bun": ["@types/bun@1.2.16", "https://npm-proxy.dev.databricks.com/@types/bun/-/bun-1.2.16.tgz", { "dependencies": { "bun-types": "1.2.16" } }, "sha512-1aCZJ/6nSiViw339RsaNhkNoEloLaPzZhxMOYEa7OzRzO41IGg5n/7I43/ZIAW/c+Q6cT12Vf7fOZOoVIzb5BQ=="], + + "@types/node": ["@types/node@22.13.9", "https://npm-proxy.dev.databricks.com/@types/node/-/node-22.13.9.tgz", { "dependencies": { "undici-types": "~6.20.0" } }, "sha512-acBjXdRJ3A6Pb3tqnw9HZmyR3Fiol3aGxRCK1x3d+6CDAMjl7I649wpSd+yNURCjbOUGu9tqtLKnTGxmK6CyGw=="], + + "@types/react": ["@types/react@19.2.2", "https://npm-proxy.dev.databricks.com/@types/react/-/react-19.2.2.tgz", { "dependencies": { "csstype": "^3.0.2" } }, "sha512-6mDvHUFSjyT2B2yeNx2nUgMxh9LtOWvkhIU3uePn2I2oyNymUAX1NIsdgviM4CH+JSrp2D2hsMvJOkxY+0wNRA=="], + + "accepts": ["accepts@2.0.0", "https://npm-proxy.dev.databricks.com/accepts/-/accepts-2.0.0.tgz", { "dependencies": { "mime-types": "^3.0.0", "negotiator": "^1.0.0" } }, "sha512-5cvg6CtKwfgdmVqY1WIiXKc3Q1bkRqGLi+2W/6ao+6Y7gu/RCwRuAhGEzh5B4KlszSuTLgZYuqFqo5bImjNKng=="], + + "agent-base": ["agent-base@7.1.4", "https://npm-proxy.dev.databricks.com/agent-base/-/agent-base-7.1.4.tgz", {}, "sha512-MnA+YT8fwfJPgBx3m60MNqakm30XOkyIoH1y6huTQvC0PwZG7ki8NacLBcrPbNoo8vEZy7Jpuk7+jMO+CUovTQ=="], + + "available-typed-arrays": ["available-typed-arrays@1.0.7", 
"https://npm-proxy.dev.databricks.com/available-typed-arrays/-/available-typed-arrays-1.0.7.tgz", { "dependencies": { "possible-typed-array-names": "^1.0.0" } }, "sha512-wvUjBtSGN7+7SjNpq/9M2Tg350UZD3q62IFZLbRAR1bSMlCo1ZaeW+BJ+D090e4hIIZLBcTDWe4Mh4jvUDajzQ=="], + + "aws-sdk": ["aws-sdk@2.1692.0", "https://npm-proxy.dev.databricks.com/aws-sdk/-/aws-sdk-2.1692.0.tgz", { "dependencies": { "buffer": "4.9.2", "events": "1.1.1", "ieee754": "1.1.13", "jmespath": "0.16.0", "querystring": "0.2.0", "sax": "1.2.1", "url": "0.10.3", "util": "^0.12.4", "uuid": "8.0.0", "xml2js": "0.6.2" } }, "sha512-x511uiJ/57FIsbgUe5csJ13k3uzu25uWQE+XqfBis/sB0SFoiElJWXRkgEAUh0U6n40eT3ay5Ue4oPkRMu1LYw=="], + + "aws4fetch": ["aws4fetch@1.0.18", "https://npm-proxy.dev.databricks.com/aws4fetch/-/aws4fetch-1.0.18.tgz", {}, "sha512-3Cf+YaUl07p24MoQ46rFwulAmiyCwH2+1zw1ZyPAX5OtJ34Hh185DwB8y/qRLb6cYYYtSFJ9pthyLc0MD4e8sQ=="], + + "base64-js": ["base64-js@1.5.1", "https://npm-proxy.dev.databricks.com/base64-js/-/base64-js-1.5.1.tgz", {}, "sha512-AKpaYlHn8t4SVbOHCy+b5+KKgvR4vrsD8vbvrbiQJps7fKDTkjkDry6ji0rUJjC0kzbNePLwzxq8iypo41qeWA=="], + + "bignumber.js": ["bignumber.js@9.3.1", "https://npm-proxy.dev.databricks.com/bignumber.js/-/bignumber.js-9.3.1.tgz", {}, "sha512-Ko0uX15oIUS7wJ3Rb30Fs6SkVbLmPBAKdlm7q9+ak9bbIeFf0MwuBsQV6z7+X768/cHsfg+WlysDWJcmthjsjQ=="], + + "body-parser": ["body-parser@2.2.0", "https://npm-proxy.dev.databricks.com/body-parser/-/body-parser-2.2.0.tgz", { "dependencies": { "bytes": "^3.1.2", "content-type": "^1.0.5", "debug": "^4.4.0", "http-errors": "^2.0.0", "iconv-lite": "^0.6.3", "on-finished": "^2.4.1", "qs": "^6.14.0", "raw-body": "^3.0.0", "type-is": "^2.0.0" } }, "sha512-02qvAaxv8tp7fBa/mw1ga98OGm+eCbqzJOKoRt70sLmfEEi+jyBYVTDGfCL/k06/4EMk/z01gCe7HoCH/f2LTg=="], - "@types/bun": ["@types/bun@1.2.16", "", { "dependencies": { "bun-types": "1.2.16" } }, "sha512-1aCZJ/6nSiViw339RsaNhkNoEloLaPzZhxMOYEa7OzRzO41IGg5n/7I43/ZIAW/c+Q6cT12Vf7fOZOoVIzb5BQ=="], + "buffer": ["buffer@4.9.2", 
"https://npm-proxy.dev.databricks.com/buffer/-/buffer-4.9.2.tgz", { "dependencies": { "base64-js": "^1.0.2", "ieee754": "^1.1.4", "isarray": "^1.0.0" } }, "sha512-xq+q3SRMOxGivLhBNaUdC64hDTQwejJ+H0T/NB1XMtTVEwNTrfFF3gAxiyW0Bu/xWEGhjVKgUcMhCrUy2+uCWg=="], - "@types/node": ["@types/node@22.13.9", "", { "dependencies": { "undici-types": "~6.20.0" } }, "sha512-acBjXdRJ3A6Pb3tqnw9HZmyR3Fiol3aGxRCK1x3d+6CDAMjl7I649wpSd+yNURCjbOUGu9tqtLKnTGxmK6CyGw=="], + "buffer-equal-constant-time": ["buffer-equal-constant-time@1.0.1", "https://npm-proxy.dev.databricks.com/buffer-equal-constant-time/-/buffer-equal-constant-time-1.0.1.tgz", {}, "sha512-zRpUiDwd/xk6ADqPMATG8vc9VPrkck7T07OIx0gnjmJAnHnTVXNQG3vfvWNuiZIkwu9KrKdA1iJKfsfTVxE6NA=="], - "@types/react": ["@types/react@19.2.2", "", { "dependencies": { "csstype": "^3.0.2" } }, "sha512-6mDvHUFSjyT2B2yeNx2nUgMxh9LtOWvkhIU3uePn2I2oyNymUAX1NIsdgviM4CH+JSrp2D2hsMvJOkxY+0wNRA=="], + "bun-types": ["bun-types@1.2.16", "https://npm-proxy.dev.databricks.com/bun-types/-/bun-types-1.2.16.tgz", { "dependencies": { "@types/node": "*" } }, "sha512-ciXLrHV4PXax9vHvUrkvun9VPVGOVwbbbBF/Ev1cXz12lyEZMoJpIJABOfPcN9gDJRaiKF9MVbSygLg4NXu3/A=="], - "accepts": ["accepts@2.0.0", "", { "dependencies": { "mime-types": "^3.0.0", "negotiator": "^1.0.0" } }, "sha512-5cvg6CtKwfgdmVqY1WIiXKc3Q1bkRqGLi+2W/6ao+6Y7gu/RCwRuAhGEzh5B4KlszSuTLgZYuqFqo5bImjNKng=="], + "bytes": ["bytes@3.1.2", "https://npm-proxy.dev.databricks.com/bytes/-/bytes-3.1.2.tgz", {}, "sha512-/Nf7TyzTx6S3yRJObOAV7956r8cr2+Oj8AC5dt8wSP3BQAoeX58NoHyCU8P8zGkNXStjTSi6fzO6F0pBdcYbEg=="], - "available-typed-arrays": ["available-typed-arrays@1.0.7", "", { "dependencies": { "possible-typed-array-names": "^1.0.0" } }, "sha512-wvUjBtSGN7+7SjNpq/9M2Tg350UZD3q62IFZLbRAR1bSMlCo1ZaeW+BJ+D090e4hIIZLBcTDWe4Mh4jvUDajzQ=="], + "call-bind": ["call-bind@1.0.8", "https://npm-proxy.dev.databricks.com/call-bind/-/call-bind-1.0.8.tgz", { "dependencies": { "call-bind-apply-helpers": "^1.0.0", "es-define-property": 
"^1.0.0", "get-intrinsic": "^1.2.4", "set-function-length": "^1.2.2" } }, "sha512-oKlSFMcMwpUg2ednkhQ454wfWiU/ul3CkJe/PEHcTKuiX6RpbehUiFMXu13HalGZxfUwCQzZG747YXBn1im9ww=="], - "aws-sdk": ["aws-sdk@2.1692.0", "", { "dependencies": { "buffer": "4.9.2", "events": "1.1.1", "ieee754": "1.1.13", "jmespath": "0.16.0", "querystring": "0.2.0", "sax": "1.2.1", "url": "0.10.3", "util": "^0.12.4", "uuid": "8.0.0", "xml2js": "0.6.2" } }, "sha512-x511uiJ/57FIsbgUe5csJ13k3uzu25uWQE+XqfBis/sB0SFoiElJWXRkgEAUh0U6n40eT3ay5Ue4oPkRMu1LYw=="], + "call-bind-apply-helpers": ["call-bind-apply-helpers@1.0.2", "https://npm-proxy.dev.databricks.com/call-bind-apply-helpers/-/call-bind-apply-helpers-1.0.2.tgz", { "dependencies": { "es-errors": "^1.3.0", "function-bind": "^1.1.2" } }, "sha512-Sp1ablJ0ivDkSzjcaJdxEunN5/XvksFJ2sMBFfq6x0ryhQV/2b/KwFe21cMpmHtPOSij8K99/wSfoEuTObmuMQ=="], - "aws4fetch": ["aws4fetch@1.0.18", "", {}, "sha512-3Cf+YaUl07p24MoQ46rFwulAmiyCwH2+1zw1ZyPAX5OtJ34Hh185DwB8y/qRLb6cYYYtSFJ9pthyLc0MD4e8sQ=="], + "call-bound": ["call-bound@1.0.4", "https://npm-proxy.dev.databricks.com/call-bound/-/call-bound-1.0.4.tgz", { "dependencies": { "call-bind-apply-helpers": "^1.0.2", "get-intrinsic": "^1.3.0" } }, "sha512-+ys997U96po4Kx/ABpBCqhA9EuxJaQWDQg7295H4hBphv3IZg0boBKuwYpt4YXp6MZ5AmZQnU/tyMTlRpaSejg=="], - "base64-js": ["base64-js@1.5.1", "", {}, "sha512-AKpaYlHn8t4SVbOHCy+b5+KKgvR4vrsD8vbvrbiQJps7fKDTkjkDry6ji0rUJjC0kzbNePLwzxq8iypo41qeWA=="], + "content-disposition": ["content-disposition@1.0.0", "https://npm-proxy.dev.databricks.com/content-disposition/-/content-disposition-1.0.0.tgz", { "dependencies": { "safe-buffer": "5.2.1" } }, "sha512-Au9nRL8VNUut/XSzbQA38+M78dzP4D+eqg3gfJHMIHHYa3bg067xj1KxMUWj+VULbiZMowKngFFbKczUrNJ1mg=="], - "body-parser": ["body-parser@2.2.0", "", { "dependencies": { "bytes": "^3.1.2", "content-type": "^1.0.5", "debug": "^4.4.0", "http-errors": "^2.0.0", "iconv-lite": "^0.6.3", "on-finished": "^2.4.1", "qs": "^6.14.0", "raw-body": "^3.0.0", "type-is": 
"^2.0.0" } }, "sha512-02qvAaxv8tp7fBa/mw1ga98OGm+eCbqzJOKoRt70sLmfEEi+jyBYVTDGfCL/k06/4EMk/z01gCe7HoCH/f2LTg=="], + "content-type": ["content-type@1.0.5", "https://npm-proxy.dev.databricks.com/content-type/-/content-type-1.0.5.tgz", {}, "sha512-nTjqfcBFEipKdXCv4YDQWCfmcLZKm81ldF0pAopTvyrFGVbcR6P/VAAd5G7N+0tTr8QqiU0tFadD6FK4NtJwOA=="], - "buffer": ["buffer@4.9.2", "", { "dependencies": { "base64-js": "^1.0.2", "ieee754": "^1.1.4", "isarray": "^1.0.0" } }, "sha512-xq+q3SRMOxGivLhBNaUdC64hDTQwejJ+H0T/NB1XMtTVEwNTrfFF3gAxiyW0Bu/xWEGhjVKgUcMhCrUy2+uCWg=="], + "cookie": ["cookie@0.7.2", "https://npm-proxy.dev.databricks.com/cookie/-/cookie-0.7.2.tgz", {}, "sha512-yki5XnKuf750l50uGTllt6kKILY4nQ1eNIQatoXEByZ5dWgnKqbnqmTrBE5B4N7lrMJKQ2ytWMiTO2o0v6Ew/w=="], - "bun-types": ["bun-types@1.2.16", "", { "dependencies": { "@types/node": "*" } }, "sha512-ciXLrHV4PXax9vHvUrkvun9VPVGOVwbbbBF/Ev1cXz12lyEZMoJpIJABOfPcN9gDJRaiKF9MVbSygLg4NXu3/A=="], + "cookie-signature": ["cookie-signature@1.2.2", "https://npm-proxy.dev.databricks.com/cookie-signature/-/cookie-signature-1.2.2.tgz", {}, "sha512-D76uU73ulSXrD1UXF4KE2TMxVVwhsnCgfAyTg9k8P6KGZjlXKrOLe4dJQKI3Bxi5wjesZoFXJWElNWBjPZMbhg=="], - "bytes": ["bytes@3.1.2", "", {}, "sha512-/Nf7TyzTx6S3yRJObOAV7956r8cr2+Oj8AC5dt8wSP3BQAoeX58NoHyCU8P8zGkNXStjTSi6fzO6F0pBdcYbEg=="], + "cors": ["cors@2.8.5", "https://npm-proxy.dev.databricks.com/cors/-/cors-2.8.5.tgz", { "dependencies": { "object-assign": "^4", "vary": "^1" } }, "sha512-KIHbLJqu73RGr/hnbrO9uBeixNGuvSQjul/jdFvS/KFSIH1hWVd1ng7zOHx+YrEfInLG7q4n6GHQ9cDtxv/P6g=="], - "call-bind": ["call-bind@1.0.8", "", { "dependencies": { "call-bind-apply-helpers": "^1.0.0", "es-define-property": "^1.0.0", "get-intrinsic": "^1.2.4", "set-function-length": "^1.2.2" } }, "sha512-oKlSFMcMwpUg2ednkhQ454wfWiU/ul3CkJe/PEHcTKuiX6RpbehUiFMXu13HalGZxfUwCQzZG747YXBn1im9ww=="], + "csstype": ["csstype@3.1.3", "https://npm-proxy.dev.databricks.com/csstype/-/csstype-3.1.3.tgz", {}, 
"sha512-M1uQkMl8rQK/szD0LNhtqxIPLpimGm8sOBwU7lLnCpSbTyY3yeU1Vc7l4KT5zT4s/yOxHH5O7tIuuLOCnLADRw=="], - "call-bind-apply-helpers": ["call-bind-apply-helpers@1.0.2", "", { "dependencies": { "es-errors": "^1.3.0", "function-bind": "^1.1.2" } }, "sha512-Sp1ablJ0ivDkSzjcaJdxEunN5/XvksFJ2sMBFfq6x0ryhQV/2b/KwFe21cMpmHtPOSij8K99/wSfoEuTObmuMQ=="], + "data-uri-to-buffer": ["data-uri-to-buffer@4.0.1", "https://npm-proxy.dev.databricks.com/data-uri-to-buffer/-/data-uri-to-buffer-4.0.1.tgz", {}, "sha512-0R9ikRb668HB7QDxT1vkpuUBtqc53YyAwMwGeUFKRojY/NWKvdZ+9UYtRfGmhqNbRkTSVpMbmyhXipFFv2cb/A=="], - "call-bound": ["call-bound@1.0.4", "", { "dependencies": { "call-bind-apply-helpers": "^1.0.2", "get-intrinsic": "^1.3.0" } }, "sha512-+ys997U96po4Kx/ABpBCqhA9EuxJaQWDQg7295H4hBphv3IZg0boBKuwYpt4YXp6MZ5AmZQnU/tyMTlRpaSejg=="], + "debug": ["debug@4.4.1", "https://npm-proxy.dev.databricks.com/debug/-/debug-4.4.1.tgz", { "dependencies": { "ms": "^2.1.3" } }, "sha512-KcKCqiftBJcZr++7ykoDIEwSa3XWowTfNPo92BYxjXiyYEVrUQh2aLyhxBCwww+heortUFxEJYcRzosstTEBYQ=="], - "content-disposition": ["content-disposition@1.0.0", "", { "dependencies": { "safe-buffer": "5.2.1" } }, "sha512-Au9nRL8VNUut/XSzbQA38+M78dzP4D+eqg3gfJHMIHHYa3bg067xj1KxMUWj+VULbiZMowKngFFbKczUrNJ1mg=="], + "define-data-property": ["define-data-property@1.1.4", "https://npm-proxy.dev.databricks.com/define-data-property/-/define-data-property-1.1.4.tgz", { "dependencies": { "es-define-property": "^1.0.0", "es-errors": "^1.3.0", "gopd": "^1.0.1" } }, "sha512-rBMvIzlpA8v6E+SJZoo++HAYqsLrkg7MSfIinMPFhmkorw7X+dOXVJQs+QT69zGkzMyfDnIMN2Wid1+NbL3T+A=="], - "content-type": ["content-type@1.0.5", "", {}, "sha512-nTjqfcBFEipKdXCv4YDQWCfmcLZKm81ldF0pAopTvyrFGVbcR6P/VAAd5G7N+0tTr8QqiU0tFadD6FK4NtJwOA=="], + "depd": ["depd@2.0.0", "https://npm-proxy.dev.databricks.com/depd/-/depd-2.0.0.tgz", {}, "sha512-g7nH6P6dyDioJogAAGprGpCtVImJhpPk/roCzdb3fIh61/s/nPsfR6onyMwkCAR/OlC3yBC0lESvUoQEAssIrw=="], - "cookie": ["cookie@0.7.2", "", {}, 
"sha512-yki5XnKuf750l50uGTllt6kKILY4nQ1eNIQatoXEByZ5dWgnKqbnqmTrBE5B4N7lrMJKQ2ytWMiTO2o0v6Ew/w=="], + "dunder-proto": ["dunder-proto@1.0.1", "https://npm-proxy.dev.databricks.com/dunder-proto/-/dunder-proto-1.0.1.tgz", { "dependencies": { "call-bind-apply-helpers": "^1.0.1", "es-errors": "^1.3.0", "gopd": "^1.2.0" } }, "sha512-KIN/nDJBQRcXw0MLVhZE9iQHmG68qAVIBg9CqmUYjmQIhgij9U5MFvrqkUL5FbtyyzZuOeOt0zdeRe4UY7ct+A=="], - "cookie-signature": ["cookie-signature@1.2.2", "", {}, "sha512-D76uU73ulSXrD1UXF4KE2TMxVVwhsnCgfAyTg9k8P6KGZjlXKrOLe4dJQKI3Bxi5wjesZoFXJWElNWBjPZMbhg=="], + "ecdsa-sig-formatter": ["ecdsa-sig-formatter@1.0.11", "https://npm-proxy.dev.databricks.com/ecdsa-sig-formatter/-/ecdsa-sig-formatter-1.0.11.tgz", { "dependencies": { "safe-buffer": "^5.0.1" } }, "sha512-nagl3RYrbNv6kQkeJIpt6NJZy8twLB/2vtz6yN9Z4vRKHN4/QZJIEbqohALSgwKdnksuY3k5Addp5lg8sVoVcQ=="], - "cors": ["cors@2.8.5", "", { "dependencies": { "object-assign": "^4", "vary": "^1" } }, "sha512-KIHbLJqu73RGr/hnbrO9uBeixNGuvSQjul/jdFvS/KFSIH1hWVd1ng7zOHx+YrEfInLG7q4n6GHQ9cDtxv/P6g=="], + "ee-first": ["ee-first@1.1.1", "https://npm-proxy.dev.databricks.com/ee-first/-/ee-first-1.1.1.tgz", {}, "sha512-WMwm9LhRUo+WUaRN+vRuETqG89IgZphVSNkdFgeb6sS/E4OrDIN7t48CAewSHXc6C8lefD8KKfr5vY61brQlow=="], - "csstype": ["csstype@3.1.3", "", {}, "sha512-M1uQkMl8rQK/szD0LNhtqxIPLpimGm8sOBwU7lLnCpSbTyY3yeU1Vc7l4KT5zT4s/yOxHH5O7tIuuLOCnLADRw=="], + "encodeurl": ["encodeurl@2.0.0", "https://npm-proxy.dev.databricks.com/encodeurl/-/encodeurl-2.0.0.tgz", {}, "sha512-Q0n9HRi4m6JuGIV1eFlmvJB7ZEVxu93IrMyiMsGC0lrMJMWzRgx6WGquyfQgZVb31vhGgXnfmPNNXmxnOkRBrg=="], - "debug": ["debug@4.4.1", "", { "dependencies": { "ms": "^2.1.3" } }, "sha512-KcKCqiftBJcZr++7ykoDIEwSa3XWowTfNPo92BYxjXiyYEVrUQh2aLyhxBCwww+heortUFxEJYcRzosstTEBYQ=="], + "es-define-property": ["es-define-property@1.0.1", "https://npm-proxy.dev.databricks.com/es-define-property/-/es-define-property-1.0.1.tgz", {}, 
"sha512-e3nRfgfUZ4rNGL232gUgX06QNyyez04KdjFrF+LTRoOXmrOgFKDg4BCdsjW8EnT69eqdYGmRpJwiPVYNrCaW3g=="], - "define-data-property": ["define-data-property@1.1.4", "", { "dependencies": { "es-define-property": "^1.0.0", "es-errors": "^1.3.0", "gopd": "^1.0.1" } }, "sha512-rBMvIzlpA8v6E+SJZoo++HAYqsLrkg7MSfIinMPFhmkorw7X+dOXVJQs+QT69zGkzMyfDnIMN2Wid1+NbL3T+A=="], + "es-errors": ["es-errors@1.3.0", "https://npm-proxy.dev.databricks.com/es-errors/-/es-errors-1.3.0.tgz", {}, "sha512-Zf5H2Kxt2xjTvbJvP2ZWLEICxA6j+hAmMzIlypy4xcBg1vKVnx89Wy0GbS+kf5cwCVFFzdCFh2XSCFNULS6csw=="], - "depd": ["depd@2.0.0", "", {}, "sha512-g7nH6P6dyDioJogAAGprGpCtVImJhpPk/roCzdb3fIh61/s/nPsfR6onyMwkCAR/OlC3yBC0lESvUoQEAssIrw=="], + "es-object-atoms": ["es-object-atoms@1.1.1", "https://npm-proxy.dev.databricks.com/es-object-atoms/-/es-object-atoms-1.1.1.tgz", { "dependencies": { "es-errors": "^1.3.0" } }, "sha512-FGgH2h8zKNim9ljj7dankFPcICIK9Cp5bm+c2gQSYePhpaG5+esrLODihIorn+Pe6FGJzWhXQotPv73jTaldXA=="], - "dunder-proto": ["dunder-proto@1.0.1", "", { "dependencies": { "call-bind-apply-helpers": "^1.0.1", "es-errors": "^1.3.0", "gopd": "^1.2.0" } }, "sha512-KIN/nDJBQRcXw0MLVhZE9iQHmG68qAVIBg9CqmUYjmQIhgij9U5MFvrqkUL5FbtyyzZuOeOt0zdeRe4UY7ct+A=="], + "escape-html": ["escape-html@1.0.3", "https://npm-proxy.dev.databricks.com/escape-html/-/escape-html-1.0.3.tgz", {}, "sha512-NiSupZ4OeuGwr68lGIeym/ksIZMJodUGOSCZ/FSnTxcrekbvqrgdUxlJOMpijaKZVjAJrWrGs/6Jy8OMuyj9ow=="], - "ee-first": ["ee-first@1.1.1", "", {}, "sha512-WMwm9LhRUo+WUaRN+vRuETqG89IgZphVSNkdFgeb6sS/E4OrDIN7t48CAewSHXc6C8lefD8KKfr5vY61brQlow=="], + "etag": ["etag@1.8.1", "https://npm-proxy.dev.databricks.com/etag/-/etag-1.8.1.tgz", {}, "sha512-aIL5Fx7mawVa300al2BnEE4iNvo1qETxLrPI/o05L7z6go7fCw1J6EQmbK4FmJ2AS7kgVF/KEZWufBfdClMcPg=="], - "encodeurl": ["encodeurl@2.0.0", "", {}, "sha512-Q0n9HRi4m6JuGIV1eFlmvJB7ZEVxu93IrMyiMsGC0lrMJMWzRgx6WGquyfQgZVb31vhGgXnfmPNNXmxnOkRBrg=="], + "events": ["events@1.1.1", 
"https://npm-proxy.dev.databricks.com/events/-/events-1.1.1.tgz", {}, "sha512-kEcvvCBByWXGnZy6JUlgAp2gBIUjfCAV6P6TgT1/aaQKcmuAEC4OZTV1I4EWQLz2gxZw76atuVyvHhTxvi0Flw=="], - "es-define-property": ["es-define-property@1.0.1", "", {}, "sha512-e3nRfgfUZ4rNGL232gUgX06QNyyez04KdjFrF+LTRoOXmrOgFKDg4BCdsjW8EnT69eqdYGmRpJwiPVYNrCaW3g=="], + "eventsource": ["eventsource@3.0.7", "https://npm-proxy.dev.databricks.com/eventsource/-/eventsource-3.0.7.tgz", { "dependencies": { "eventsource-parser": "^3.0.1" } }, "sha512-CRT1WTyuQoD771GW56XEZFQ/ZoSfWid1alKGDYMmkt2yl8UXrVR4pspqWNEcqKvVIzg6PAltWjxcSSPrboA4iA=="], - "es-errors": ["es-errors@1.3.0", "", {}, "sha512-Zf5H2Kxt2xjTvbJvP2ZWLEICxA6j+hAmMzIlypy4xcBg1vKVnx89Wy0GbS+kf5cwCVFFzdCFh2XSCFNULS6csw=="], + "eventsource-parser": ["eventsource-parser@3.0.2", "https://npm-proxy.dev.databricks.com/eventsource-parser/-/eventsource-parser-3.0.2.tgz", {}, "sha512-6RxOBZ/cYgd8usLwsEl+EC09Au/9BcmCKYF2/xbml6DNczf7nv0MQb+7BA2F+li6//I+28VNlQR37XfQtcAJuA=="], - "es-object-atoms": ["es-object-atoms@1.1.1", "", { "dependencies": { "es-errors": "^1.3.0" } }, "sha512-FGgH2h8zKNim9ljj7dankFPcICIK9Cp5bm+c2gQSYePhpaG5+esrLODihIorn+Pe6FGJzWhXQotPv73jTaldXA=="], + "express": ["express@5.1.0", "https://npm-proxy.dev.databricks.com/express/-/express-5.1.0.tgz", { "dependencies": { "accepts": "^2.0.0", "body-parser": "^2.2.0", "content-disposition": "^1.0.0", "content-type": "^1.0.5", "cookie": "^0.7.1", "cookie-signature": "^1.2.1", "debug": "^4.4.0", "encodeurl": "^2.0.0", "escape-html": "^1.0.3", "etag": "^1.8.1", "finalhandler": "^2.1.0", "fresh": "^2.0.0", "http-errors": "^2.0.0", "merge-descriptors": "^2.0.0", "mime-types": "^3.0.0", "on-finished": "^2.4.1", "once": "^1.4.0", "parseurl": "^1.3.3", "proxy-addr": "^2.0.7", "qs": "^6.14.0", "range-parser": "^1.2.1", "router": "^2.2.0", "send": "^1.1.0", "serve-static": "^2.2.0", "statuses": "^2.0.1", "type-is": "^2.0.1", "vary": "^1.1.2" } }, 
"sha512-DT9ck5YIRU+8GYzzU5kT3eHGA5iL+1Zd0EutOmTE9Dtk+Tvuzd23VBU+ec7HPNSTxXYO55gPV/hq4pSBJDjFpA=="], - "escape-html": ["escape-html@1.0.3", "", {}, "sha512-NiSupZ4OeuGwr68lGIeym/ksIZMJodUGOSCZ/FSnTxcrekbvqrgdUxlJOMpijaKZVjAJrWrGs/6Jy8OMuyj9ow=="], + "express-rate-limit": ["express-rate-limit@7.5.0", "https://npm-proxy.dev.databricks.com/express-rate-limit/-/express-rate-limit-7.5.0.tgz", { "peerDependencies": { "express": "^4.11 || 5 || ^5.0.0-beta.1" } }, "sha512-eB5zbQh5h+VenMPM3fh+nw1YExi5nMr6HUCR62ELSP11huvxm/Uir1H1QEyTkk5QX6A58pX6NmaTMceKZ0Eodg=="], - "etag": ["etag@1.8.1", "", {}, "sha512-aIL5Fx7mawVa300al2BnEE4iNvo1qETxLrPI/o05L7z6go7fCw1J6EQmbK4FmJ2AS7kgVF/KEZWufBfdClMcPg=="], + "extend": ["extend@3.0.2", "https://npm-proxy.dev.databricks.com/extend/-/extend-3.0.2.tgz", {}, "sha512-fjquC59cD7CyW6urNXK0FBufkZcoiGG80wTuPujX590cB5Ttln20E2UB4S/WARVqhXffZl2LNgS+gQdPIIim/g=="], - "events": ["events@1.1.1", "", {}, "sha512-kEcvvCBByWXGnZy6JUlgAp2gBIUjfCAV6P6TgT1/aaQKcmuAEC4OZTV1I4EWQLz2gxZw76atuVyvHhTxvi0Flw=="], + "fetch-blob": ["fetch-blob@3.2.0", "https://npm-proxy.dev.databricks.com/fetch-blob/-/fetch-blob-3.2.0.tgz", { "dependencies": { "node-domexception": "^1.0.0", "web-streams-polyfill": "^3.0.3" } }, "sha512-7yAQpD2UMJzLi1Dqv7qFYnPbaPx7ZfFK6PiIxQ4PfkGPyNyl2Ugx+a/umUonmKqjhM4DnfbMvdX6otXq83soQQ=="], - "eventsource": ["eventsource@3.0.7", "", { "dependencies": { "eventsource-parser": "^3.0.1" } }, "sha512-CRT1WTyuQoD771GW56XEZFQ/ZoSfWid1alKGDYMmkt2yl8UXrVR4pspqWNEcqKvVIzg6PAltWjxcSSPrboA4iA=="], + "finalhandler": ["finalhandler@2.1.0", "https://npm-proxy.dev.databricks.com/finalhandler/-/finalhandler-2.1.0.tgz", { "dependencies": { "debug": "^4.4.0", "encodeurl": "^2.0.0", "escape-html": "^1.0.3", "on-finished": "^2.4.1", "parseurl": "^1.3.3", "statuses": "^2.0.1" } }, "sha512-/t88Ty3d5JWQbWYgaOGCCYfXRwV1+be02WqYYlL6h0lEiUAMPM8o8qKGO01YIkOHzka2up08wvgYD0mDiI+q3Q=="], - "eventsource-parser": ["eventsource-parser@3.0.2", "", {}, 
"sha512-6RxOBZ/cYgd8usLwsEl+EC09Au/9BcmCKYF2/xbml6DNczf7nv0MQb+7BA2F+li6//I+28VNlQR37XfQtcAJuA=="], + "for-each": ["for-each@0.3.5", "https://npm-proxy.dev.databricks.com/for-each/-/for-each-0.3.5.tgz", { "dependencies": { "is-callable": "^1.2.7" } }, "sha512-dKx12eRCVIzqCxFGplyFKJMPvLEWgmNtUrpTiJIR5u97zEhRG8ySrtboPHZXx7daLxQVrl643cTzbab2tkQjxg=="], - "express": ["express@5.1.0", "", { "dependencies": { "accepts": "^2.0.0", "body-parser": "^2.2.0", "content-disposition": "^1.0.0", "content-type": "^1.0.5", "cookie": "^0.7.1", "cookie-signature": "^1.2.1", "debug": "^4.4.0", "encodeurl": "^2.0.0", "escape-html": "^1.0.3", "etag": "^1.8.1", "finalhandler": "^2.1.0", "fresh": "^2.0.0", "http-errors": "^2.0.0", "merge-descriptors": "^2.0.0", "mime-types": "^3.0.0", "on-finished": "^2.4.1", "once": "^1.4.0", "parseurl": "^1.3.3", "proxy-addr": "^2.0.7", "qs": "^6.14.0", "range-parser": "^1.2.1", "router": "^2.2.0", "send": "^1.1.0", "serve-static": "^2.2.0", "statuses": "^2.0.1", "type-is": "^2.0.1", "vary": "^1.1.2" } }, "sha512-DT9ck5YIRU+8GYzzU5kT3eHGA5iL+1Zd0EutOmTE9Dtk+Tvuzd23VBU+ec7HPNSTxXYO55gPV/hq4pSBJDjFpA=="], + "formdata-polyfill": ["formdata-polyfill@4.0.10", "https://npm-proxy.dev.databricks.com/formdata-polyfill/-/formdata-polyfill-4.0.10.tgz", { "dependencies": { "fetch-blob": "^3.1.2" } }, "sha512-buewHzMvYL29jdeQTVILecSaZKnt/RJWjoZCF5OW60Z67/GmSLBkOFM7qh1PI3zFNtJbaZL5eQu1vLfazOwj4g=="], - "express-rate-limit": ["express-rate-limit@7.5.0", "", { "peerDependencies": { "express": "^4.11 || 5 || ^5.0.0-beta.1" } }, "sha512-eB5zbQh5h+VenMPM3fh+nw1YExi5nMr6HUCR62ELSP11huvxm/Uir1H1QEyTkk5QX6A58pX6NmaTMceKZ0Eodg=="], + "forwarded": ["forwarded@0.2.0", "https://npm-proxy.dev.databricks.com/forwarded/-/forwarded-0.2.0.tgz", {}, "sha512-buRG0fpBtRHSTCOASe6hD258tEubFoRLb4ZNA6NxMVHNw2gOcwHo9wyablzMzOA5z9xA9L1KNjk/Nt6MT9aYow=="], - "finalhandler": ["finalhandler@2.1.0", "", { "dependencies": { "debug": "^4.4.0", "encodeurl": "^2.0.0", "escape-html": "^1.0.3", 
"on-finished": "^2.4.1", "parseurl": "^1.3.3", "statuses": "^2.0.1" } }, "sha512-/t88Ty3d5JWQbWYgaOGCCYfXRwV1+be02WqYYlL6h0lEiUAMPM8o8qKGO01YIkOHzka2up08wvgYD0mDiI+q3Q=="], + "fresh": ["fresh@2.0.0", "https://npm-proxy.dev.databricks.com/fresh/-/fresh-2.0.0.tgz", {}, "sha512-Rx/WycZ60HOaqLKAi6cHRKKI7zxWbJ31MhntmtwMoaTeF7XFH9hhBp8vITaMidfljRQ6eYWCKkaTK+ykVJHP2A=="], - "for-each": ["for-each@0.3.5", "", { "dependencies": { "is-callable": "^1.2.7" } }, "sha512-dKx12eRCVIzqCxFGplyFKJMPvLEWgmNtUrpTiJIR5u97zEhRG8ySrtboPHZXx7daLxQVrl643cTzbab2tkQjxg=="], + "function-bind": ["function-bind@1.1.2", "https://npm-proxy.dev.databricks.com/function-bind/-/function-bind-1.1.2.tgz", {}, "sha512-7XHNxH7qX9xG5mIwxkhumTox/MIRNcOgDrxWsMt2pAr23WHp6MrRlN7FBSFpCpr+oVO0F744iUgR82nJMfG2SA=="], - "forwarded": ["forwarded@0.2.0", "", {}, "sha512-buRG0fpBtRHSTCOASe6hD258tEubFoRLb4ZNA6NxMVHNw2gOcwHo9wyablzMzOA5z9xA9L1KNjk/Nt6MT9aYow=="], + "gaxios": ["gaxios@7.1.4", "https://npm-proxy.dev.databricks.com/gaxios/-/gaxios-7.1.4.tgz", { "dependencies": { "extend": "^3.0.2", "https-proxy-agent": "^7.0.1", "node-fetch": "^3.3.2" } }, "sha512-bTIgTsM2bWn3XklZISBTQX7ZSddGW+IO3bMdGaemHZ3tbqExMENHLx6kKZ/KlejgrMtj8q7wBItt51yegqalrA=="], - "fresh": ["fresh@2.0.0", "", {}, "sha512-Rx/WycZ60HOaqLKAi6cHRKKI7zxWbJ31MhntmtwMoaTeF7XFH9hhBp8vITaMidfljRQ6eYWCKkaTK+ykVJHP2A=="], + "gcp-metadata": ["gcp-metadata@8.1.2", "https://npm-proxy.dev.databricks.com/gcp-metadata/-/gcp-metadata-8.1.2.tgz", { "dependencies": { "gaxios": "^7.0.0", "google-logging-utils": "^1.0.0", "json-bigint": "^1.0.0" } }, "sha512-zV/5HKTfCeKWnxG0Dmrw51hEWFGfcF2xiXqcA3+J90WDuP0SvoiSO5ORvcBsifmx/FoIjgQN3oNOGaQ5PhLFkg=="], - "function-bind": ["function-bind@1.1.2", "", {}, "sha512-7XHNxH7qX9xG5mIwxkhumTox/MIRNcOgDrxWsMt2pAr23WHp6MrRlN7FBSFpCpr+oVO0F744iUgR82nJMfG2SA=="], + "get-intrinsic": ["get-intrinsic@1.3.0", "https://npm-proxy.dev.databricks.com/get-intrinsic/-/get-intrinsic-1.3.0.tgz", { "dependencies": { "call-bind-apply-helpers": 
"^1.0.2", "es-define-property": "^1.0.1", "es-errors": "^1.3.0", "es-object-atoms": "^1.1.1", "function-bind": "^1.1.2", "get-proto": "^1.0.1", "gopd": "^1.2.0", "has-symbols": "^1.1.0", "hasown": "^2.0.2", "math-intrinsics": "^1.1.0" } }, "sha512-9fSjSaos/fRIVIp+xSJlE6lfwhES7LNtKaCBIamHsjr2na1BiABJPo0mOjjz8GJDURarmCPGqaiVg5mfjb98CQ=="], - "get-intrinsic": ["get-intrinsic@1.3.0", "", { "dependencies": { "call-bind-apply-helpers": "^1.0.2", "es-define-property": "^1.0.1", "es-errors": "^1.3.0", "es-object-atoms": "^1.1.1", "function-bind": "^1.1.2", "get-proto": "^1.0.1", "gopd": "^1.2.0", "has-symbols": "^1.1.0", "hasown": "^2.0.2", "math-intrinsics": "^1.1.0" } }, "sha512-9fSjSaos/fRIVIp+xSJlE6lfwhES7LNtKaCBIamHsjr2na1BiABJPo0mOjjz8GJDURarmCPGqaiVg5mfjb98CQ=="], + "get-proto": ["get-proto@1.0.1", "https://npm-proxy.dev.databricks.com/get-proto/-/get-proto-1.0.1.tgz", { "dependencies": { "dunder-proto": "^1.0.1", "es-object-atoms": "^1.0.0" } }, "sha512-sTSfBjoXBp89JvIKIefqw7U2CCebsc74kiY6awiGogKtoSGbgjYE/G/+l9sF3MWFPNc9IcoOC4ODfKHfxFmp0g=="], - "get-proto": ["get-proto@1.0.1", "", { "dependencies": { "dunder-proto": "^1.0.1", "es-object-atoms": "^1.0.0" } }, "sha512-sTSfBjoXBp89JvIKIefqw7U2CCebsc74kiY6awiGogKtoSGbgjYE/G/+l9sF3MWFPNc9IcoOC4ODfKHfxFmp0g=="], + "google-auth-library": ["google-auth-library@10.6.2", "https://npm-proxy.dev.databricks.com/google-auth-library/-/google-auth-library-10.6.2.tgz", { "dependencies": { "base64-js": "^1.3.0", "ecdsa-sig-formatter": "^1.0.11", "gaxios": "^7.1.4", "gcp-metadata": "8.1.2", "google-logging-utils": "1.1.3", "jws": "^4.0.0" } }, "sha512-e27Z6EThmVNNvtYASwQxose/G57rkRuaRbQyxM2bvYLLX/GqWZ5chWq2EBoUchJbCc57eC9ArzO5wMsEmWftCw=="], - "gopd": ["gopd@1.2.0", "", {}, "sha512-ZUKRh6/kUFoAiTAtTYPZJ3hw9wNxx+BIBOijnlG9PnrJsCcSjs1wyyD6vJpaYtgnzDrKYRSqf3OO6Rfa93xsRg=="], + "google-logging-utils": ["google-logging-utils@1.1.3", "https://npm-proxy.dev.databricks.com/google-logging-utils/-/google-logging-utils-1.1.3.tgz", {}, 
"sha512-eAmLkjDjAFCVXg7A1unxHsLf961m6y17QFqXqAXGj/gVkKFrEICfStRfwUlGNfeCEjNRa32JEWOUTlYXPyyKvA=="], - "has-property-descriptors": ["has-property-descriptors@1.0.2", "", { "dependencies": { "es-define-property": "^1.0.0" } }, "sha512-55JNKuIW+vq4Ke1BjOTjM2YctQIvCT7GFzHwmfZPGo5wnrgkid0YQtnAleFSqumZm4az3n2BS+erby5ipJdgrg=="], + "gopd": ["gopd@1.2.0", "https://npm-proxy.dev.databricks.com/gopd/-/gopd-1.2.0.tgz", {}, "sha512-ZUKRh6/kUFoAiTAtTYPZJ3hw9wNxx+BIBOijnlG9PnrJsCcSjs1wyyD6vJpaYtgnzDrKYRSqf3OO6Rfa93xsRg=="], - "has-symbols": ["has-symbols@1.1.0", "", {}, "sha512-1cDNdwJ2Jaohmb3sg4OmKaMBwuC48sYni5HUw2DvsC8LjGTLK9h+eb1X6RyuOHe4hT0ULCW68iomhjUoKUqlPQ=="], + "has-property-descriptors": ["has-property-descriptors@1.0.2", "https://npm-proxy.dev.databricks.com/has-property-descriptors/-/has-property-descriptors-1.0.2.tgz", { "dependencies": { "es-define-property": "^1.0.0" } }, "sha512-55JNKuIW+vq4Ke1BjOTjM2YctQIvCT7GFzHwmfZPGo5wnrgkid0YQtnAleFSqumZm4az3n2BS+erby5ipJdgrg=="], - "has-tostringtag": ["has-tostringtag@1.0.2", "", { "dependencies": { "has-symbols": "^1.0.3" } }, "sha512-NqADB8VjPFLM2V0VvHUewwwsw0ZWBaIdgo+ieHtK3hasLz4qeCRjYcqfB6AQrBggRKppKF8L52/VqdVsO47Dlw=="], + "has-symbols": ["has-symbols@1.1.0", "https://npm-proxy.dev.databricks.com/has-symbols/-/has-symbols-1.1.0.tgz", {}, "sha512-1cDNdwJ2Jaohmb3sg4OmKaMBwuC48sYni5HUw2DvsC8LjGTLK9h+eb1X6RyuOHe4hT0ULCW68iomhjUoKUqlPQ=="], - "hasown": ["hasown@2.0.2", "", { "dependencies": { "function-bind": "^1.1.2" } }, "sha512-0hJU9SCPvmMzIBdZFqNPXWa6dqh7WdH0cII9y+CyS8rG3nL48Bclra9HmKhVVUHyPWNH5Y7xDwAB7bfgSjkUMQ=="], + "has-tostringtag": ["has-tostringtag@1.0.2", "https://npm-proxy.dev.databricks.com/has-tostringtag/-/has-tostringtag-1.0.2.tgz", { "dependencies": { "has-symbols": "^1.0.3" } }, "sha512-NqADB8VjPFLM2V0VvHUewwwsw0ZWBaIdgo+ieHtK3hasLz4qeCRjYcqfB6AQrBggRKppKF8L52/VqdVsO47Dlw=="], - "hono": ["hono@4.8.0", "", {}, 
"sha512-NoiHrqJxoe1MYXqW+/0/Q4NCizKj2Ivm4KmX8mOSBtw9UJ7KYaOGKkO7csIwO5UlZpfvVRdcgiMb0GGyjEjtcw=="], + "hasown": ["hasown@2.0.2", "https://npm-proxy.dev.databricks.com/hasown/-/hasown-2.0.2.tgz", { "dependencies": { "function-bind": "^1.1.2" } }, "sha512-0hJU9SCPvmMzIBdZFqNPXWa6dqh7WdH0cII9y+CyS8rG3nL48Bclra9HmKhVVUHyPWNH5Y7xDwAB7bfgSjkUMQ=="], - "http-errors": ["http-errors@2.0.0", "", { "dependencies": { "depd": "2.0.0", "inherits": "2.0.4", "setprototypeof": "1.2.0", "statuses": "2.0.1", "toidentifier": "1.0.1" } }, "sha512-FtwrG/euBzaEjYeRqOgly7G0qviiXoJWnvEH2Z1plBdXgbyjv34pHTSb9zoeHMyDy33+DWy5Wt9Wo+TURtOYSQ=="], + "hono": ["hono@4.8.0", "https://npm-proxy.dev.databricks.com/hono/-/hono-4.8.0.tgz", {}, "sha512-NoiHrqJxoe1MYXqW+/0/Q4NCizKj2Ivm4KmX8mOSBtw9UJ7KYaOGKkO7csIwO5UlZpfvVRdcgiMb0GGyjEjtcw=="], - "iconv-lite": ["iconv-lite@0.6.3", "", { "dependencies": { "safer-buffer": ">= 2.1.2 < 3.0.0" } }, "sha512-4fCk79wshMdzMp2rH06qWrJE4iolqLhCUH+OiuIgU++RB0+94NlDL81atO7GX55uUKueo0txHNtvEyI6D7WdMw=="], + "http-errors": ["http-errors@2.0.0", "https://npm-proxy.dev.databricks.com/http-errors/-/http-errors-2.0.0.tgz", { "dependencies": { "depd": "2.0.0", "inherits": "2.0.4", "setprototypeof": "1.2.0", "statuses": "2.0.1", "toidentifier": "1.0.1" } }, "sha512-FtwrG/euBzaEjYeRqOgly7G0qviiXoJWnvEH2Z1plBdXgbyjv34pHTSb9zoeHMyDy33+DWy5Wt9Wo+TURtOYSQ=="], - "ieee754": ["ieee754@1.1.13", "", {}, "sha512-4vf7I2LYV/HaWerSo3XmlMkp5eZ83i+/CDluXi/IGTs/O1sejBNhTtnxzmRZfvOUqj7lZjqHkeTvpgSFDlWZTg=="], + "https-proxy-agent": ["https-proxy-agent@7.0.6", "https://npm-proxy.dev.databricks.com/https-proxy-agent/-/https-proxy-agent-7.0.6.tgz", { "dependencies": { "agent-base": "^7.1.2", "debug": "4" } }, "sha512-vK9P5/iUfdl95AI+JVyUuIcVtd4ofvtrOr3HNtM2yxC9bnMbEdp3x01OhQNnjb8IJYi38VlTE3mBXwcfvywuSw=="], - "inherits": ["inherits@2.0.4", "", {}, "sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ=="], + "iconv-lite": ["iconv-lite@0.6.3", 
"https://npm-proxy.dev.databricks.com/iconv-lite/-/iconv-lite-0.6.3.tgz", { "dependencies": { "safer-buffer": ">= 2.1.2 < 3.0.0" } }, "sha512-4fCk79wshMdzMp2rH06qWrJE4iolqLhCUH+OiuIgU++RB0+94NlDL81atO7GX55uUKueo0txHNtvEyI6D7WdMw=="], - "ipaddr.js": ["ipaddr.js@1.9.1", "", {}, "sha512-0KI/607xoxSToH7GjN1FfSbLoU0+btTicjsQSWQlh/hZykN8KpmMf7uYwPW3R+akZ6R/w18ZlXSHBYXiYUPO3g=="], + "ieee754": ["ieee754@1.1.13", "https://npm-proxy.dev.databricks.com/ieee754/-/ieee754-1.1.13.tgz", {}, "sha512-4vf7I2LYV/HaWerSo3XmlMkp5eZ83i+/CDluXi/IGTs/O1sejBNhTtnxzmRZfvOUqj7lZjqHkeTvpgSFDlWZTg=="], - "is-arguments": ["is-arguments@1.2.0", "", { "dependencies": { "call-bound": "^1.0.2", "has-tostringtag": "^1.0.2" } }, "sha512-7bVbi0huj/wrIAOzb8U1aszg9kdi3KN/CyU19CTI7tAoZYEZoL9yCDXpbXN+uPsuWnP02cyug1gleqq+TU+YCA=="], + "inherits": ["inherits@2.0.4", "https://npm-proxy.dev.databricks.com/inherits/-/inherits-2.0.4.tgz", {}, "sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ=="], - "is-callable": ["is-callable@1.2.7", "", {}, "sha512-1BC0BVFhS/p0qtw6enp8e+8OD0UrK0oFLztSjNzhcKA3WDuJxxAPXzPuPtKkjEY9UUoEWlX/8fgKeu2S8i9JTA=="], + "ini": ["ini@6.0.0", "https://npm-proxy.dev.databricks.com/ini/-/ini-6.0.0.tgz", {}, "sha512-IBTdIkzZNOpqm7q3dRqJvMaldXjDHWkEDfrwGEQTs5eaQMWV+djAhR+wahyNNMAa+qpbDUhBMVt4ZKNwpPm7xQ=="], - "is-generator-function": ["is-generator-function@1.1.0", "", { "dependencies": { "call-bound": "^1.0.3", "get-proto": "^1.0.0", "has-tostringtag": "^1.0.2", "safe-regex-test": "^1.1.0" } }, "sha512-nPUB5km40q9e8UfN/Zc24eLlzdSf9OfKByBw9CIdw4H1giPMeA0OIJvbchsCu4npfI2QcMVBsGEBHKZ7wLTWmQ=="], + "ipaddr.js": ["ipaddr.js@1.9.1", "https://npm-proxy.dev.databricks.com/ipaddr.js/-/ipaddr.js-1.9.1.tgz", {}, "sha512-0KI/607xoxSToH7GjN1FfSbLoU0+btTicjsQSWQlh/hZykN8KpmMf7uYwPW3R+akZ6R/w18ZlXSHBYXiYUPO3g=="], - "is-promise": ["is-promise@4.0.0", "", {}, "sha512-hvpoI6korhJMnej285dSg6nu1+e6uxs7zG3BYAm5byqDsgJNWwxzM6z6iZiAgQR4TJ30JmBTOwqZUw3WlyH3AQ=="], + 
"is-arguments": ["is-arguments@1.2.0", "https://npm-proxy.dev.databricks.com/is-arguments/-/is-arguments-1.2.0.tgz", { "dependencies": { "call-bound": "^1.0.2", "has-tostringtag": "^1.0.2" } }, "sha512-7bVbi0huj/wrIAOzb8U1aszg9kdi3KN/CyU19CTI7tAoZYEZoL9yCDXpbXN+uPsuWnP02cyug1gleqq+TU+YCA=="], - "is-regex": ["is-regex@1.2.1", "", { "dependencies": { "call-bound": "^1.0.2", "gopd": "^1.2.0", "has-tostringtag": "^1.0.2", "hasown": "^2.0.2" } }, "sha512-MjYsKHO5O7mCsmRGxWcLWheFqN9DJ/2TmngvjKXihe6efViPqc274+Fx/4fYj/r03+ESvBdTXK0V6tA3rgez1g=="], + "is-callable": ["is-callable@1.2.7", "https://npm-proxy.dev.databricks.com/is-callable/-/is-callable-1.2.7.tgz", {}, "sha512-1BC0BVFhS/p0qtw6enp8e+8OD0UrK0oFLztSjNzhcKA3WDuJxxAPXzPuPtKkjEY9UUoEWlX/8fgKeu2S8i9JTA=="], - "is-typed-array": ["is-typed-array@1.1.15", "", { "dependencies": { "which-typed-array": "^1.1.16" } }, "sha512-p3EcsicXjit7SaskXHs1hA91QxgTw46Fv6EFKKGS5DRFLD8yKnohjF3hxoju94b/OcMZoQukzpPpBE9uLVKzgQ=="], + "is-generator-function": ["is-generator-function@1.1.0", "https://npm-proxy.dev.databricks.com/is-generator-function/-/is-generator-function-1.1.0.tgz", { "dependencies": { "call-bound": "^1.0.3", "get-proto": "^1.0.0", "has-tostringtag": "^1.0.2", "safe-regex-test": "^1.1.0" } }, "sha512-nPUB5km40q9e8UfN/Zc24eLlzdSf9OfKByBw9CIdw4H1giPMeA0OIJvbchsCu4npfI2QcMVBsGEBHKZ7wLTWmQ=="], - "isarray": ["isarray@1.0.0", "", {}, "sha512-VLghIWNM6ELQzo7zwmcg0NmTVyWKYjvIeM83yjp0wRDTmUnrM678fQbcKBo6n2CJEF0szoG//ytg+TKla89ALQ=="], + "is-promise": ["is-promise@4.0.0", "https://npm-proxy.dev.databricks.com/is-promise/-/is-promise-4.0.0.tgz", {}, "sha512-hvpoI6korhJMnej285dSg6nu1+e6uxs7zG3BYAm5byqDsgJNWwxzM6z6iZiAgQR4TJ30JmBTOwqZUw3WlyH3AQ=="], - "jmespath": ["jmespath@0.16.0", "", {}, "sha512-9FzQjJ7MATs1tSpnco1K6ayiYE3figslrXA72G2HQ/n76RzvYlofyi5QM+iX4YRs/pu3yzxlVQSST23+dMDknw=="], + "is-regex": ["is-regex@1.2.1", "https://npm-proxy.dev.databricks.com/is-regex/-/is-regex-1.2.1.tgz", { "dependencies": { "call-bound": "^1.0.2", 
"gopd": "^1.2.0", "has-tostringtag": "^1.0.2", "hasown": "^2.0.2" } }, "sha512-MjYsKHO5O7mCsmRGxWcLWheFqN9DJ/2TmngvjKXihe6efViPqc274+Fx/4fYj/r03+ESvBdTXK0V6tA3rgez1g=="], - "jose": ["jose@5.2.3", "", {}, "sha512-KUXdbctm1uHVL8BYhnyHkgp3zDX5KW8ZhAKVFEfUbU2P8Alpzjb+48hHvjOdQIyPshoblhzsuqOwEEAbtHVirA=="], + "is-typed-array": ["is-typed-array@1.1.15", "https://npm-proxy.dev.databricks.com/is-typed-array/-/is-typed-array-1.1.15.tgz", { "dependencies": { "which-typed-array": "^1.1.16" } }, "sha512-p3EcsicXjit7SaskXHs1hA91QxgTw46Fv6EFKKGS5DRFLD8yKnohjF3hxoju94b/OcMZoQukzpPpBE9uLVKzgQ=="], - "lru-cache": ["lru-cache@6.0.0", "", { "dependencies": { "yallist": "^4.0.0" } }, "sha512-Jo6dJ04CmSjuznwJSS3pUeWmd/H0ffTlkXXgwZi+eq1UCmqQwCh+eLsYOYCwY991i2Fah4h1BEMCx4qThGbsiA=="], + "isarray": ["isarray@1.0.0", "https://npm-proxy.dev.databricks.com/isarray/-/isarray-1.0.0.tgz", {}, "sha512-VLghIWNM6ELQzo7zwmcg0NmTVyWKYjvIeM83yjp0wRDTmUnrM678fQbcKBo6n2CJEF0szoG//ytg+TKla89ALQ=="], - "math-intrinsics": ["math-intrinsics@1.1.0", "", {}, "sha512-/IXtbwEk5HTPyEwyKX6hGkYXxM9nbj64B+ilVJnC/R6B0pH5G4V3b0pVbL7DBj4tkhBAppbQUlf6F6Xl9LHu1g=="], + "jmespath": ["jmespath@0.16.0", "https://npm-proxy.dev.databricks.com/jmespath/-/jmespath-0.16.0.tgz", {}, "sha512-9FzQjJ7MATs1tSpnco1K6ayiYE3figslrXA72G2HQ/n76RzvYlofyi5QM+iX4YRs/pu3yzxlVQSST23+dMDknw=="], - "media-typer": ["media-typer@1.1.0", "", {}, "sha512-aisnrDP4GNe06UcKFnV5bfMNPBUw4jsLGaWwWfnH3v02GnBuXX2MCVn5RbrWo0j3pczUilYblq7fQ7Nw2t5XKw=="], + "jose": ["jose@5.2.3", "https://npm-proxy.dev.databricks.com/jose/-/jose-5.2.3.tgz", {}, "sha512-KUXdbctm1uHVL8BYhnyHkgp3zDX5KW8ZhAKVFEfUbU2P8Alpzjb+48hHvjOdQIyPshoblhzsuqOwEEAbtHVirA=="], - "merge-descriptors": ["merge-descriptors@2.0.0", "", {}, "sha512-Snk314V5ayFLhp3fkUREub6WtjBfPdCPY1Ln8/8munuLuiYhsABgBVWsozAG+MWMbVEvcdcpbi9R7ww22l9Q3g=="], + "json-bigint": ["json-bigint@1.0.0", "https://npm-proxy.dev.databricks.com/json-bigint/-/json-bigint-1.0.0.tgz", { "dependencies": { "bignumber.js": "^9.0.0" } 
}, "sha512-SiPv/8VpZuWbvLSMtTDU8hEfrZWg/mH/nV/b4o0CYbSxu1UIQPLdwKOCIyLQX+VIPO5vrLX3i8qtqFyhdPSUSQ=="], - "mime-db": ["mime-db@1.54.0", "", {}, "sha512-aU5EJuIN2WDemCcAp2vFBfp/m4EAhWJnUNSSw0ixs7/kXbd6Pg64EmwJkNdFhB8aWt1sH2CTXrLxo/iAGV3oPQ=="], + "jwa": ["jwa@2.0.1", "https://npm-proxy.dev.databricks.com/jwa/-/jwa-2.0.1.tgz", { "dependencies": { "buffer-equal-constant-time": "^1.0.1", "ecdsa-sig-formatter": "1.0.11", "safe-buffer": "^5.0.1" } }, "sha512-hRF04fqJIP8Abbkq5NKGN0Bbr3JxlQ+qhZufXVr0DvujKy93ZCbXZMHDL4EOtodSbCWxOqR8MS1tXA5hwqCXDg=="], - "mime-types": ["mime-types@3.0.1", "", { "dependencies": { "mime-db": "^1.54.0" } }, "sha512-xRc4oEhT6eaBpU1XF7AjpOFD+xQmXNB5OVKwp4tqCuBpHLS/ZbBDrc07mYTDqVMg6PfxUjjNp85O6Cd2Z/5HWA=="], + "jws": ["jws@4.0.1", "https://npm-proxy.dev.databricks.com/jws/-/jws-4.0.1.tgz", { "dependencies": { "jwa": "^2.0.1", "safe-buffer": "^5.0.1" } }, "sha512-EKI/M/yqPncGUUh44xz0PxSidXFr/+r0pA70+gIYhjv+et7yxM+s29Y+VGDkovRofQem0fs7Uvf4+YmAdyRduA=="], + + "lru-cache": ["lru-cache@6.0.0", "https://npm-proxy.dev.databricks.com/lru-cache/-/lru-cache-6.0.0.tgz", { "dependencies": { "yallist": "^4.0.0" } }, "sha512-Jo6dJ04CmSjuznwJSS3pUeWmd/H0ffTlkXXgwZi+eq1UCmqQwCh+eLsYOYCwY991i2Fah4h1BEMCx4qThGbsiA=="], + + "math-intrinsics": ["math-intrinsics@1.1.0", "https://npm-proxy.dev.databricks.com/math-intrinsics/-/math-intrinsics-1.1.0.tgz", {}, "sha512-/IXtbwEk5HTPyEwyKX6hGkYXxM9nbj64B+ilVJnC/R6B0pH5G4V3b0pVbL7DBj4tkhBAppbQUlf6F6Xl9LHu1g=="], + + "media-typer": ["media-typer@1.1.0", "https://npm-proxy.dev.databricks.com/media-typer/-/media-typer-1.1.0.tgz", {}, "sha512-aisnrDP4GNe06UcKFnV5bfMNPBUw4jsLGaWwWfnH3v02GnBuXX2MCVn5RbrWo0j3pczUilYblq7fQ7Nw2t5XKw=="], + + "merge-descriptors": ["merge-descriptors@2.0.0", "https://npm-proxy.dev.databricks.com/merge-descriptors/-/merge-descriptors-2.0.0.tgz", {}, "sha512-Snk314V5ayFLhp3fkUREub6WtjBfPdCPY1Ln8/8munuLuiYhsABgBVWsozAG+MWMbVEvcdcpbi9R7ww22l9Q3g=="], + + "mime-db": ["mime-db@1.54.0", 
"https://npm-proxy.dev.databricks.com/mime-db/-/mime-db-1.54.0.tgz", {}, "sha512-aU5EJuIN2WDemCcAp2vFBfp/m4EAhWJnUNSSw0ixs7/kXbd6Pg64EmwJkNdFhB8aWt1sH2CTXrLxo/iAGV3oPQ=="], + + "mime-types": ["mime-types@3.0.1", "https://npm-proxy.dev.databricks.com/mime-types/-/mime-types-3.0.1.tgz", { "dependencies": { "mime-db": "^1.54.0" } }, "sha512-xRc4oEhT6eaBpU1XF7AjpOFD+xQmXNB5OVKwp4tqCuBpHLS/ZbBDrc07mYTDqVMg6PfxUjjNp85O6Cd2Z/5HWA=="], "models.dev": ["models.dev@workspace:packages/core"], - "ms": ["ms@2.1.3", "", {}, "sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA=="], + "ms": ["ms@2.1.3", "https://npm-proxy.dev.databricks.com/ms/-/ms-2.1.3.tgz", {}, "sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA=="], + + "negotiator": ["negotiator@1.0.0", "https://npm-proxy.dev.databricks.com/negotiator/-/negotiator-1.0.0.tgz", {}, "sha512-8Ofs/AUQh8MaEcrlq5xOX0CQ9ypTF5dl78mjlMNfOK08fzpgTHQRQPBxcPlEtIw0yRpws+Zo/3r+5WRby7u3Gg=="], + + "node-domexception": ["node-domexception@1.0.0", "https://npm-proxy.dev.databricks.com/node-domexception/-/node-domexception-1.0.0.tgz", {}, "sha512-/jKZoMpw0F8GRwl4/eLROPA3cfcXtLApP0QzLmUT/HuPCZWyB7IY9ZrMeKw2O/nFIqPQB3PVM9aYm0F312AXDQ=="], + + "node-fetch": ["node-fetch@3.3.2", "https://npm-proxy.dev.databricks.com/node-fetch/-/node-fetch-3.3.2.tgz", { "dependencies": { "data-uri-to-buffer": "^4.0.0", "fetch-blob": "^3.1.4", "formdata-polyfill": "^4.0.10" } }, "sha512-dRB78srN/l6gqWulah9SrxeYnxeddIG30+GOqK/9OlLVyLg3HPnr6SqOWTWOXKRwC2eGYCkZ59NNuSgvSrpgOA=="], + + "object-assign": ["object-assign@4.1.1", "https://npm-proxy.dev.databricks.com/object-assign/-/object-assign-4.1.1.tgz", {}, "sha512-rJgTQnkUnH1sFw8yT6VSU3zD3sWmu6sZhIseY8VX+GRu3P6F7Fu+JNDoXfklElbLJSnc3FUQHVe4cU5hj+BcUg=="], + + "object-hash": ["object-hash@2.2.0", "https://npm-proxy.dev.databricks.com/object-hash/-/object-hash-2.2.0.tgz", {}, 
"sha512-gScRMn0bS5fH+IuwyIFgnh9zBdo4DV+6GhygmWM9HyNJSgS0hScp1f5vjtm7oIIOiT9trXrShAkLFSc2IqKNgw=="], - "negotiator": ["negotiator@1.0.0", "", {}, "sha512-8Ofs/AUQh8MaEcrlq5xOX0CQ9ypTF5dl78mjlMNfOK08fzpgTHQRQPBxcPlEtIw0yRpws+Zo/3r+5WRby7u3Gg=="], + "object-inspect": ["object-inspect@1.13.4", "https://npm-proxy.dev.databricks.com/object-inspect/-/object-inspect-1.13.4.tgz", {}, "sha512-W67iLl4J2EXEGTbfeHCffrjDfitvLANg0UlX3wFUUSTx92KXRFegMHUVgSqE+wvhAbi4WqjGg9czysTV2Epbew=="], - "object-assign": ["object-assign@4.1.1", "", {}, "sha512-rJgTQnkUnH1sFw8yT6VSU3zD3sWmu6sZhIseY8VX+GRu3P6F7Fu+JNDoXfklElbLJSnc3FUQHVe4cU5hj+BcUg=="], + "oidc-token-hash": ["oidc-token-hash@5.1.0", "https://npm-proxy.dev.databricks.com/oidc-token-hash/-/oidc-token-hash-5.1.0.tgz", {}, "sha512-y0W+X7Ppo7oZX6eovsRkuzcSM40Bicg2JEJkDJ4irIt1wsYAP5MLSNv+QAogO8xivMffw/9OvV3um1pxXgt1uA=="], - "object-hash": ["object-hash@2.2.0", "", {}, "sha512-gScRMn0bS5fH+IuwyIFgnh9zBdo4DV+6GhygmWM9HyNJSgS0hScp1f5vjtm7oIIOiT9trXrShAkLFSc2IqKNgw=="], + "on-finished": ["on-finished@2.4.1", "https://npm-proxy.dev.databricks.com/on-finished/-/on-finished-2.4.1.tgz", { "dependencies": { "ee-first": "1.1.1" } }, "sha512-oVlzkg3ENAhCk2zdv7IJwd/QUD4z2RxRwpkcGY8psCVcCYZNq4wYnVWALHM+brtuJjePWiYF/ClmuDr8Ch5+kg=="], - "object-inspect": ["object-inspect@1.13.4", "", {}, "sha512-W67iLl4J2EXEGTbfeHCffrjDfitvLANg0UlX3wFUUSTx92KXRFegMHUVgSqE+wvhAbi4WqjGg9czysTV2Epbew=="], + "once": ["once@1.4.0", "https://npm-proxy.dev.databricks.com/once/-/once-1.4.0.tgz", { "dependencies": { "wrappy": "1" } }, "sha512-lNaJgI+2Q5URQBkccEKHTQOPaXdUxnZZElQTZY0MFUAuaEqe1E+Nyvgdz/aIyNi6Z9MzO5dv1H8n58/GELp3+w=="], - "oidc-token-hash": ["oidc-token-hash@5.1.0", "", {}, "sha512-y0W+X7Ppo7oZX6eovsRkuzcSM40Bicg2JEJkDJ4irIt1wsYAP5MLSNv+QAogO8xivMffw/9OvV3um1pxXgt1uA=="], + "opencontrol": ["opencontrol@0.0.6", "https://npm-proxy.dev.databricks.com/opencontrol/-/opencontrol-0.0.6.tgz", { "dependencies": { "@modelcontextprotocol/sdk": "1.6.1", "@tsconfig/bun": 
"1.0.7", "hono": "4.7.4", "zod": "3.24.2", "zod-to-json-schema": "3.24.3" }, "bin": { "opencontrol": "bin/index.mjs" } }, "sha512-QeCrpOK5D15QV8kjnGVeD/BHFLwcVr+sn4T6KKmP0WAMs2pww56e4h+eOGHb5iPOufUQXbdbBKi6WV2kk7tefQ=="], - "on-finished": ["on-finished@2.4.1", "", { "dependencies": { "ee-first": "1.1.1" } }, "sha512-oVlzkg3ENAhCk2zdv7IJwd/QUD4z2RxRwpkcGY8psCVcCYZNq4wYnVWALHM+brtuJjePWiYF/ClmuDr8Ch5+kg=="], + "openid-client": ["openid-client@5.6.4", "https://npm-proxy.dev.databricks.com/openid-client/-/openid-client-5.6.4.tgz", { "dependencies": { "jose": "^4.15.4", "lru-cache": "^6.0.0", "object-hash": "^2.2.0", "oidc-token-hash": "^5.0.3" } }, "sha512-T1h3B10BRPKfcObdBklX639tVz+xh34O7GjofqrqiAQdm7eHsQ00ih18x6wuJ/E6FxdtS2u3FmUGPDeEcMwzNA=="], - "once": ["once@1.4.0", "", { "dependencies": { "wrappy": "1" } }, "sha512-lNaJgI+2Q5URQBkccEKHTQOPaXdUxnZZElQTZY0MFUAuaEqe1E+Nyvgdz/aIyNi6Z9MzO5dv1H8n58/GELp3+w=="], + "parseurl": ["parseurl@1.3.3", "https://npm-proxy.dev.databricks.com/parseurl/-/parseurl-1.3.3.tgz", {}, "sha512-CiyeOxFT/JZyN5m0z9PfXw4SCBJ6Sygz1Dpl0wqjlhDEGGBP1GnsUVEL0p63hoG1fcj3fHynXi9NYO4nWOL+qQ=="], - "opencontrol": ["opencontrol@0.0.6", "", { "dependencies": { "@modelcontextprotocol/sdk": "1.6.1", "@tsconfig/bun": "1.0.7", "hono": "4.7.4", "zod": "3.24.2", "zod-to-json-schema": "3.24.3" }, "bin": { "opencontrol": "bin/index.mjs" } }, "sha512-QeCrpOK5D15QV8kjnGVeD/BHFLwcVr+sn4T6KKmP0WAMs2pww56e4h+eOGHb5iPOufUQXbdbBKi6WV2kk7tefQ=="], + "path-to-regexp": ["path-to-regexp@8.2.0", "https://npm-proxy.dev.databricks.com/path-to-regexp/-/path-to-regexp-8.2.0.tgz", {}, "sha512-TdrF7fW9Rphjq4RjrW0Kp2AW0Ahwu9sRGTkS6bvDi0SCwZlEZYmcfDbEsTz8RVk0EHIS/Vd1bv3JhG+1xZuAyQ=="], - "openid-client": ["openid-client@5.6.4", "", { "dependencies": { "jose": "^4.15.4", "lru-cache": "^6.0.0", "object-hash": "^2.2.0", "oidc-token-hash": "^5.0.3" } }, "sha512-T1h3B10BRPKfcObdBklX639tVz+xh34O7GjofqrqiAQdm7eHsQ00ih18x6wuJ/E6FxdtS2u3FmUGPDeEcMwzNA=="], + "pkce-challenge": 
["pkce-challenge@4.1.0", "https://npm-proxy.dev.databricks.com/pkce-challenge/-/pkce-challenge-4.1.0.tgz", {}, "sha512-ZBmhE1C9LcPoH9XZSdwiPtbPHZROwAnMy+kIFQVrnMCxY4Cudlz3gBOpzilgc0jOgRaiT3sIWfpMomW2ar2orQ=="], - "parseurl": ["parseurl@1.3.3", "", {}, "sha512-CiyeOxFT/JZyN5m0z9PfXw4SCBJ6Sygz1Dpl0wqjlhDEGGBP1GnsUVEL0p63hoG1fcj3fHynXi9NYO4nWOL+qQ=="], + "possible-typed-array-names": ["possible-typed-array-names@1.1.0", "https://npm-proxy.dev.databricks.com/possible-typed-array-names/-/possible-typed-array-names-1.1.0.tgz", {}, "sha512-/+5VFTchJDoVj3bhoqi6UeymcD00DAwb1nJwamzPvHEszJ4FpF6SNNbUbOS8yI56qHzdV8eK0qEfOSiodkTdxg=="], - "path-to-regexp": ["path-to-regexp@8.2.0", "", {}, "sha512-TdrF7fW9Rphjq4RjrW0Kp2AW0Ahwu9sRGTkS6bvDi0SCwZlEZYmcfDbEsTz8RVk0EHIS/Vd1bv3JhG+1xZuAyQ=="], + "proxy-addr": ["proxy-addr@2.0.7", "https://npm-proxy.dev.databricks.com/proxy-addr/-/proxy-addr-2.0.7.tgz", { "dependencies": { "forwarded": "0.2.0", "ipaddr.js": "1.9.1" } }, "sha512-llQsMLSUDUPT44jdrU/O37qlnifitDP+ZwrmmZcoSKyLKvtZxpyV0n2/bD/N4tBAAZ/gJEdZU7KMraoK1+XYAg=="], - "pkce-challenge": ["pkce-challenge@4.1.0", "", {}, "sha512-ZBmhE1C9LcPoH9XZSdwiPtbPHZROwAnMy+kIFQVrnMCxY4Cudlz3gBOpzilgc0jOgRaiT3sIWfpMomW2ar2orQ=="], + "punycode": ["punycode@1.3.2", "https://npm-proxy.dev.databricks.com/punycode/-/punycode-1.3.2.tgz", {}, "sha512-RofWgt/7fL5wP1Y7fxE7/EmTLzQVnB0ycyibJ0OOHIlJqTNzglYFxVwETOcIoJqJmpDXJ9xImDv+Fq34F/d4Dw=="], - "possible-typed-array-names": ["possible-typed-array-names@1.1.0", "", {}, "sha512-/+5VFTchJDoVj3bhoqi6UeymcD00DAwb1nJwamzPvHEszJ4FpF6SNNbUbOS8yI56qHzdV8eK0qEfOSiodkTdxg=="], + "qs": ["qs@6.14.0", "https://npm-proxy.dev.databricks.com/qs/-/qs-6.14.0.tgz", { "dependencies": { "side-channel": "^1.1.0" } }, "sha512-YWWTjgABSKcvs/nWBi9PycY/JiPJqOD4JA6o9Sej2AtvSGarXxKC3OQSk4pAarbdQlKAh5D4FCQkJNkW+GAn3w=="], - "proxy-addr": ["proxy-addr@2.0.7", "", { "dependencies": { "forwarded": "0.2.0", "ipaddr.js": "1.9.1" } }, 
"sha512-llQsMLSUDUPT44jdrU/O37qlnifitDP+ZwrmmZcoSKyLKvtZxpyV0n2/bD/N4tBAAZ/gJEdZU7KMraoK1+XYAg=="], + "querystring": ["querystring@0.2.0", "https://npm-proxy.dev.databricks.com/querystring/-/querystring-0.2.0.tgz", {}, "sha512-X/xY82scca2tau62i9mDyU9K+I+djTMUsvwf7xnUX5GLvVzgJybOJf4Y6o9Zx3oJK/LSXg5tTZBjwzqVPaPO2g=="], - "punycode": ["punycode@1.3.2", "", {}, "sha512-RofWgt/7fL5wP1Y7fxE7/EmTLzQVnB0ycyibJ0OOHIlJqTNzglYFxVwETOcIoJqJmpDXJ9xImDv+Fq34F/d4Dw=="], + "range-parser": ["range-parser@1.2.1", "https://npm-proxy.dev.databricks.com/range-parser/-/range-parser-1.2.1.tgz", {}, "sha512-Hrgsx+orqoygnmhFbKaHE6c296J+HTAQXoxEF6gNupROmmGJRoyzfG3ccAveqCBrwr/2yxQ5BVd/GTl5agOwSg=="], - "qs": ["qs@6.14.0", "", { "dependencies": { "side-channel": "^1.1.0" } }, "sha512-YWWTjgABSKcvs/nWBi9PycY/JiPJqOD4JA6o9Sej2AtvSGarXxKC3OQSk4pAarbdQlKAh5D4FCQkJNkW+GAn3w=="], + "raw-body": ["raw-body@3.0.0", "https://npm-proxy.dev.databricks.com/raw-body/-/raw-body-3.0.0.tgz", { "dependencies": { "bytes": "3.1.2", "http-errors": "2.0.0", "iconv-lite": "0.6.3", "unpipe": "1.0.0" } }, "sha512-RmkhL8CAyCRPXCE28MMH0z2PNWQBNk2Q09ZdxM9IOOXwxwZbN+qbWaatPkdkWIKL2ZVDImrN/pK5HTRz2PcS4g=="], - "querystring": ["querystring@0.2.0", "", {}, "sha512-X/xY82scca2tau62i9mDyU9K+I+djTMUsvwf7xnUX5GLvVzgJybOJf4Y6o9Zx3oJK/LSXg5tTZBjwzqVPaPO2g=="], + "reflect-metadata": ["reflect-metadata@0.2.2", "https://npm-proxy.dev.databricks.com/reflect-metadata/-/reflect-metadata-0.2.2.tgz", {}, "sha512-urBwgfrvVP/eAyXx4hluJivBKzuEbSQs9rKWCrCkbSxNv8mxPcUZKeuoF3Uy4mJl3Lwprp6yy5/39VWigZ4K6Q=="], - "range-parser": ["range-parser@1.2.1", "", {}, "sha512-Hrgsx+orqoygnmhFbKaHE6c296J+HTAQXoxEF6gNupROmmGJRoyzfG3ccAveqCBrwr/2yxQ5BVd/GTl5agOwSg=="], + "router": ["router@2.2.0", "https://npm-proxy.dev.databricks.com/router/-/router-2.2.0.tgz", { "dependencies": { "debug": "^4.4.0", "depd": "^2.0.0", "is-promise": "^4.0.0", "parseurl": "^1.3.3", "path-to-regexp": "^8.0.0" } }, 
"sha512-nLTrUKm2UyiL7rlhapu/Zl45FwNgkZGaCpZbIHajDYgwlJCOzLSk+cIPAnsEqV955GjILJnKbdQC1nVPz+gAYQ=="], - "raw-body": ["raw-body@3.0.0", "", { "dependencies": { "bytes": "3.1.2", "http-errors": "2.0.0", "iconv-lite": "0.6.3", "unpipe": "1.0.0" } }, "sha512-RmkhL8CAyCRPXCE28MMH0z2PNWQBNk2Q09ZdxM9IOOXwxwZbN+qbWaatPkdkWIKL2ZVDImrN/pK5HTRz2PcS4g=="], + "safe-buffer": ["safe-buffer@5.2.1", "https://npm-proxy.dev.databricks.com/safe-buffer/-/safe-buffer-5.2.1.tgz", {}, "sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ=="], - "router": ["router@2.2.0", "", { "dependencies": { "debug": "^4.4.0", "depd": "^2.0.0", "is-promise": "^4.0.0", "parseurl": "^1.3.3", "path-to-regexp": "^8.0.0" } }, "sha512-nLTrUKm2UyiL7rlhapu/Zl45FwNgkZGaCpZbIHajDYgwlJCOzLSk+cIPAnsEqV955GjILJnKbdQC1nVPz+gAYQ=="], + "safe-regex-test": ["safe-regex-test@1.1.0", "https://npm-proxy.dev.databricks.com/safe-regex-test/-/safe-regex-test-1.1.0.tgz", { "dependencies": { "call-bound": "^1.0.2", "es-errors": "^1.3.0", "is-regex": "^1.2.1" } }, "sha512-x/+Cz4YrimQxQccJf5mKEbIa1NzeCRNI5Ecl/ekmlYaampdNLPalVyIcCZNNH3MvmqBugV5TMYZXv0ljslUlaw=="], - "safe-buffer": ["safe-buffer@5.2.1", "", {}, "sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ=="], + "safer-buffer": ["safer-buffer@2.1.2", "https://npm-proxy.dev.databricks.com/safer-buffer/-/safer-buffer-2.1.2.tgz", {}, "sha512-YZo3K82SD7Riyi0E1EQPojLz7kpepnSQI9IyPbHHg1XXXevb5dJI7tpyN2ADxGcQbHG7vcyRHk0cbwqcQriUtg=="], - "safe-regex-test": ["safe-regex-test@1.1.0", "", { "dependencies": { "call-bound": "^1.0.2", "es-errors": "^1.3.0", "is-regex": "^1.2.1" } }, "sha512-x/+Cz4YrimQxQccJf5mKEbIa1NzeCRNI5Ecl/ekmlYaampdNLPalVyIcCZNNH3MvmqBugV5TMYZXv0ljslUlaw=="], + "sax": ["sax@1.2.1", "https://npm-proxy.dev.databricks.com/sax/-/sax-1.2.1.tgz", {}, "sha512-8I2a3LovHTOpm7NV5yOyO8IHqgVsfK4+UuySrXU8YXkSRX7k6hCV9b3HrkKCr3nMpgj+0bmocaJJWpvp1oc7ZA=="], - "safer-buffer": ["safer-buffer@2.1.2", 
"", {}, "sha512-YZo3K82SD7Riyi0E1EQPojLz7kpepnSQI9IyPbHHg1XXXevb5dJI7tpyN2ADxGcQbHG7vcyRHk0cbwqcQriUtg=="], + "semver": ["semver@7.7.4", "https://npm-proxy.dev.databricks.com/semver/-/semver-7.7.4.tgz", { "bin": { "semver": "bin/semver.js" } }, "sha512-vFKC2IEtQnVhpT78h1Yp8wzwrf8CM+MzKMHGJZfBtzhZNycRFnXsHk6E5TxIkkMsgNS7mdX3AGB7x2QM2di4lA=="], - "sax": ["sax@1.2.1", "", {}, "sha512-8I2a3LovHTOpm7NV5yOyO8IHqgVsfK4+UuySrXU8YXkSRX7k6hCV9b3HrkKCr3nMpgj+0bmocaJJWpvp1oc7ZA=="], + "send": ["send@1.2.0", "https://npm-proxy.dev.databricks.com/send/-/send-1.2.0.tgz", { "dependencies": { "debug": "^4.3.5", "encodeurl": "^2.0.0", "escape-html": "^1.0.3", "etag": "^1.8.1", "fresh": "^2.0.0", "http-errors": "^2.0.0", "mime-types": "^3.0.1", "ms": "^2.1.3", "on-finished": "^2.4.1", "range-parser": "^1.2.1", "statuses": "^2.0.1" } }, "sha512-uaW0WwXKpL9blXE2o0bRhoL2EGXIrZxQ2ZQ4mgcfoBxdFmQold+qWsD2jLrfZ0trjKL6vOw0j//eAwcALFjKSw=="], - "send": ["send@1.2.0", "", { "dependencies": { "debug": "^4.3.5", "encodeurl": "^2.0.0", "escape-html": "^1.0.3", "etag": "^1.8.1", "fresh": "^2.0.0", "http-errors": "^2.0.0", "mime-types": "^3.0.1", "ms": "^2.1.3", "on-finished": "^2.4.1", "range-parser": "^1.2.1", "statuses": "^2.0.1" } }, "sha512-uaW0WwXKpL9blXE2o0bRhoL2EGXIrZxQ2ZQ4mgcfoBxdFmQold+qWsD2jLrfZ0trjKL6vOw0j//eAwcALFjKSw=="], + "serve-static": ["serve-static@2.2.0", "https://npm-proxy.dev.databricks.com/serve-static/-/serve-static-2.2.0.tgz", { "dependencies": { "encodeurl": "^2.0.0", "escape-html": "^1.0.3", "parseurl": "^1.3.3", "send": "^1.2.0" } }, "sha512-61g9pCh0Vnh7IutZjtLGGpTA355+OPn2TyDv/6ivP2h/AdAVX9azsoxmg2/M6nZeQZNYBEwIcsne1mJd9oQItQ=="], - "serve-static": ["serve-static@2.2.0", "", { "dependencies": { "encodeurl": "^2.0.0", "escape-html": "^1.0.3", "parseurl": "^1.3.3", "send": "^1.2.0" } }, "sha512-61g9pCh0Vnh7IutZjtLGGpTA355+OPn2TyDv/6ivP2h/AdAVX9azsoxmg2/M6nZeQZNYBEwIcsne1mJd9oQItQ=="], + "set-function-length": ["set-function-length@1.2.2", 
"https://npm-proxy.dev.databricks.com/set-function-length/-/set-function-length-1.2.2.tgz", { "dependencies": { "define-data-property": "^1.1.4", "es-errors": "^1.3.0", "function-bind": "^1.1.2", "get-intrinsic": "^1.2.4", "gopd": "^1.0.1", "has-property-descriptors": "^1.0.2" } }, "sha512-pgRc4hJ4/sNjWCSS9AmnS40x3bNMDTknHgL5UaMBTMyJnU90EgWh1Rz+MC9eFu4BuN/UwZjKQuY/1v3rM7HMfg=="], - "set-function-length": ["set-function-length@1.2.2", "", { "dependencies": { "define-data-property": "^1.1.4", "es-errors": "^1.3.0", "function-bind": "^1.1.2", "get-intrinsic": "^1.2.4", "gopd": "^1.0.1", "has-property-descriptors": "^1.0.2" } }, "sha512-pgRc4hJ4/sNjWCSS9AmnS40x3bNMDTknHgL5UaMBTMyJnU90EgWh1Rz+MC9eFu4BuN/UwZjKQuY/1v3rM7HMfg=="], + "setprototypeof": ["setprototypeof@1.2.0", "https://npm-proxy.dev.databricks.com/setprototypeof/-/setprototypeof-1.2.0.tgz", {}, "sha512-E5LDX7Wrp85Kil5bhZv46j8jOeboKq5JMmYM3gVGdGH8xFpPWXUMsNrlODCrkoxMEeNi/XZIwuRvY4XNwYMJpw=="], - "setprototypeof": ["setprototypeof@1.2.0", "", {}, "sha512-E5LDX7Wrp85Kil5bhZv46j8jOeboKq5JMmYM3gVGdGH8xFpPWXUMsNrlODCrkoxMEeNi/XZIwuRvY4XNwYMJpw=="], + "side-channel": ["side-channel@1.1.0", "https://npm-proxy.dev.databricks.com/side-channel/-/side-channel-1.1.0.tgz", { "dependencies": { "es-errors": "^1.3.0", "object-inspect": "^1.13.3", "side-channel-list": "^1.0.0", "side-channel-map": "^1.0.1", "side-channel-weakmap": "^1.0.2" } }, "sha512-ZX99e6tRweoUXqR+VBrslhda51Nh5MTQwou5tnUDgbtyM0dBgmhEDtWGP/xbKn6hqfPRHujUNwz5fy/wbbhnpw=="], - "side-channel": ["side-channel@1.1.0", "", { "dependencies": { "es-errors": "^1.3.0", "object-inspect": "^1.13.3", "side-channel-list": "^1.0.0", "side-channel-map": "^1.0.1", "side-channel-weakmap": "^1.0.2" } }, "sha512-ZX99e6tRweoUXqR+VBrslhda51Nh5MTQwou5tnUDgbtyM0dBgmhEDtWGP/xbKn6hqfPRHujUNwz5fy/wbbhnpw=="], + "side-channel-list": ["side-channel-list@1.0.0", "https://npm-proxy.dev.databricks.com/side-channel-list/-/side-channel-list-1.0.0.tgz", { "dependencies": { "es-errors": 
"^1.3.0", "object-inspect": "^1.13.3" } }, "sha512-FCLHtRD/gnpCiCHEiJLOwdmFP+wzCmDEkc9y7NsYxeF4u7Btsn1ZuwgwJGxImImHicJArLP4R0yX4c2KCrMrTA=="], - "side-channel-list": ["side-channel-list@1.0.0", "", { "dependencies": { "es-errors": "^1.3.0", "object-inspect": "^1.13.3" } }, "sha512-FCLHtRD/gnpCiCHEiJLOwdmFP+wzCmDEkc9y7NsYxeF4u7Btsn1ZuwgwJGxImImHicJArLP4R0yX4c2KCrMrTA=="], + "side-channel-map": ["side-channel-map@1.0.1", "https://npm-proxy.dev.databricks.com/side-channel-map/-/side-channel-map-1.0.1.tgz", { "dependencies": { "call-bound": "^1.0.2", "es-errors": "^1.3.0", "get-intrinsic": "^1.2.5", "object-inspect": "^1.13.3" } }, "sha512-VCjCNfgMsby3tTdo02nbjtM/ewra6jPHmpThenkTYh8pG9ucZ/1P8So4u4FGBek/BjpOVsDCMoLA/iuBKIFXRA=="], - "side-channel-map": ["side-channel-map@1.0.1", "", { "dependencies": { "call-bound": "^1.0.2", "es-errors": "^1.3.0", "get-intrinsic": "^1.2.5", "object-inspect": "^1.13.3" } }, "sha512-VCjCNfgMsby3tTdo02nbjtM/ewra6jPHmpThenkTYh8pG9ucZ/1P8So4u4FGBek/BjpOVsDCMoLA/iuBKIFXRA=="], + "side-channel-weakmap": ["side-channel-weakmap@1.0.2", "https://npm-proxy.dev.databricks.com/side-channel-weakmap/-/side-channel-weakmap-1.0.2.tgz", { "dependencies": { "call-bound": "^1.0.2", "es-errors": "^1.3.0", "get-intrinsic": "^1.2.5", "object-inspect": "^1.13.3", "side-channel-map": "^1.0.1" } }, "sha512-WPS/HvHQTYnHisLo9McqBHOJk2FkHO/tlpvldyrnem4aeQp4hai3gythswg6p01oSoTl58rcpiFAjF2br2Ak2A=="], - "side-channel-weakmap": ["side-channel-weakmap@1.0.2", "", { "dependencies": { "call-bound": "^1.0.2", "es-errors": "^1.3.0", "get-intrinsic": "^1.2.5", "object-inspect": "^1.13.3", "side-channel-map": "^1.0.1" } }, "sha512-WPS/HvHQTYnHisLo9McqBHOJk2FkHO/tlpvldyrnem4aeQp4hai3gythswg6p01oSoTl58rcpiFAjF2br2Ak2A=="], + "sst": ["sst@3.17.23", "https://npm-proxy.dev.databricks.com/sst/-/sst-3.17.23.tgz", { "dependencies": { "aws-sdk": "2.1692.0", "aws4fetch": "1.0.18", "jose": "5.2.3", "opencontrol": "0.0.6", "openid-client": "5.6.4" }, "optionalDependencies": { 
"sst-darwin-arm64": "3.17.23", "sst-darwin-x64": "3.17.23", "sst-linux-arm64": "3.17.23", "sst-linux-x64": "3.17.23", "sst-linux-x86": "3.17.23", "sst-win32-arm64": "3.17.23", "sst-win32-x64": "3.17.23", "sst-win32-x86": "3.17.23" }, "bin": { "sst": "bin/sst.mjs" } }, "sha512-TwKgUgDnZdc1Swe+bvCNeyO4dQnYz5cTodMpYj3jlXZdK9/KNz0PVxT1f0u5E76i1pmilXrUBL/f7iiMPw4RDg=="], - "sst": ["sst@3.17.23", "", { "dependencies": { "aws-sdk": "2.1692.0", "aws4fetch": "1.0.18", "jose": "5.2.3", "opencontrol": "0.0.6", "openid-client": "5.6.4" }, "optionalDependencies": { "sst-darwin-arm64": "3.17.23", "sst-darwin-x64": "3.17.23", "sst-linux-arm64": "3.17.23", "sst-linux-x64": "3.17.23", "sst-linux-x86": "3.17.23", "sst-win32-arm64": "3.17.23", "sst-win32-x64": "3.17.23", "sst-win32-x86": "3.17.23" }, "bin": { "sst": "bin/sst.mjs" } }, "sha512-TwKgUgDnZdc1Swe+bvCNeyO4dQnYz5cTodMpYj3jlXZdK9/KNz0PVxT1f0u5E76i1pmilXrUBL/f7iiMPw4RDg=="], + "sst-darwin-arm64": ["sst-darwin-arm64@3.17.23", "https://npm-proxy.dev.databricks.com/sst-darwin-arm64/-/sst-darwin-arm64-3.17.23.tgz", { "os": "darwin", "cpu": "arm64" }, "sha512-R6kvmF+rUideOoU7KBs2SdvrIupoE+b+Dor/eq9Uo4Dojj7KvYDZI/EDm8sSCbbcx/opiWeyNqKtlnLEdCxE6g=="], - "sst-darwin-arm64": ["sst-darwin-arm64@3.17.23", "", { "os": "darwin", "cpu": "arm64" }, "sha512-R6kvmF+rUideOoU7KBs2SdvrIupoE+b+Dor/eq9Uo4Dojj7KvYDZI/EDm8sSCbbcx/opiWeyNqKtlnLEdCxE6g=="], + "sst-darwin-x64": ["sst-darwin-x64@3.17.23", "https://npm-proxy.dev.databricks.com/sst-darwin-x64/-/sst-darwin-x64-3.17.23.tgz", { "os": "darwin", "cpu": "x64" }, "sha512-WW4P1S35iYCifQXxD+sE3wuzcN+LHLpuKMaNoaBqEcWGZnH3IPaDJ7rpLF0arkDAo/z3jZmWWzOCkr0JuqJ8vQ=="], - "sst-darwin-x64": ["sst-darwin-x64@3.17.23", "", { "os": "darwin", "cpu": "x64" }, "sha512-WW4P1S35iYCifQXxD+sE3wuzcN+LHLpuKMaNoaBqEcWGZnH3IPaDJ7rpLF0arkDAo/z3jZmWWzOCkr0JuqJ8vQ=="], + "sst-linux-arm64": ["sst-linux-arm64@3.17.23", "https://npm-proxy.dev.databricks.com/sst-linux-arm64/-/sst-linux-arm64-3.17.23.tgz", { "os": "linux", 
"cpu": "arm64" }, "sha512-TjtNqgIh7RlAWgPLFCAt0mXvIB+J7WjmRvIRrAdX0mXsndOiBJ/DMOgXSLVsIWHCfPj8MIEot/hWpnJgXgIeag=="], - "sst-linux-arm64": ["sst-linux-arm64@3.17.23", "", { "os": "linux", "cpu": "arm64" }, "sha512-TjtNqgIh7RlAWgPLFCAt0mXvIB+J7WjmRvIRrAdX0mXsndOiBJ/DMOgXSLVsIWHCfPj8MIEot/hWpnJgXgIeag=="], + "sst-linux-x64": ["sst-linux-x64@3.17.23", "https://npm-proxy.dev.databricks.com/sst-linux-x64/-/sst-linux-x64-3.17.23.tgz", { "os": "linux", "cpu": "x64" }, "sha512-qdqJiEbYfCjZlI3F/TA6eoIU7JXVkEEI/UMILNf2JWhky0KQdCW2Xyz+wb6c0msVJCWdUM/uj+1DaiP2eXvghw=="], - "sst-linux-x64": ["sst-linux-x64@3.17.23", "", { "os": "linux", "cpu": "x64" }, "sha512-qdqJiEbYfCjZlI3F/TA6eoIU7JXVkEEI/UMILNf2JWhky0KQdCW2Xyz+wb6c0msVJCWdUM/uj+1DaiP2eXvghw=="], + "sst-linux-x86": ["sst-linux-x86@3.17.23", "https://npm-proxy.dev.databricks.com/sst-linux-x86/-/sst-linux-x86-3.17.23.tgz", { "os": "linux", "cpu": "none" }, "sha512-aGmUujIvoNlmAABEGsOgfY1rxD9koC6hN8bnTLbDI+oI/u/zjHYh50jsbL0p3TlaHpwF/lxP3xFSuT6IKp+KgA=="], - "sst-linux-x86": ["sst-linux-x86@3.17.23", "", { "os": "linux", "cpu": "none" }, "sha512-aGmUujIvoNlmAABEGsOgfY1rxD9koC6hN8bnTLbDI+oI/u/zjHYh50jsbL0p3TlaHpwF/lxP3xFSuT6IKp+KgA=="], + "sst-win32-arm64": ["sst-win32-arm64@3.17.23", "https://npm-proxy.dev.databricks.com/sst-win32-arm64/-/sst-win32-arm64-3.17.23.tgz", { "os": "win32", "cpu": "arm64" }, "sha512-ZxdkGqYDrrZGz98rijDCN+m5yuCcwD6Bc9/6hubLsvdpNlVorUqzpg801Ec97xSK0nIC9g6pNiRyxAcsQQstUg=="], - "sst-win32-arm64": ["sst-win32-arm64@3.17.23", "", { "os": "win32", "cpu": "arm64" }, "sha512-ZxdkGqYDrrZGz98rijDCN+m5yuCcwD6Bc9/6hubLsvdpNlVorUqzpg801Ec97xSK0nIC9g6pNiRyxAcsQQstUg=="], + "sst-win32-x64": ["sst-win32-x64@3.17.23", "https://npm-proxy.dev.databricks.com/sst-win32-x64/-/sst-win32-x64-3.17.23.tgz", { "os": "win32", "cpu": "x64" }, "sha512-yc9cor4MS49Ccy2tQCF1tf6M81yLeSGzGL+gjhUxpVKo2pN3bxl3w70eyU/mTXSEeyAmG9zEfbt6FNu4sy5cUA=="], - "sst-win32-x64": ["sst-win32-x64@3.17.23", "", { "os": "win32", "cpu": "x64" }, 
"sha512-yc9cor4MS49Ccy2tQCF1tf6M81yLeSGzGL+gjhUxpVKo2pN3bxl3w70eyU/mTXSEeyAmG9zEfbt6FNu4sy5cUA=="], + "sst-win32-x86": ["sst-win32-x86@3.17.23", "https://npm-proxy.dev.databricks.com/sst-win32-x86/-/sst-win32-x86-3.17.23.tgz", { "os": "win32", "cpu": "none" }, "sha512-DIp3s54IpNAfdYjSRt6McvkbEPQDMxUu6RUeRAd2C+FcTJgTloon/ghAPQBaDgu2VoVgymjcJARO/XyfKcCLOQ=="], - "sst-win32-x86": ["sst-win32-x86@3.17.23", "", { "os": "win32", "cpu": "none" }, "sha512-DIp3s54IpNAfdYjSRt6McvkbEPQDMxUu6RUeRAd2C+FcTJgTloon/ghAPQBaDgu2VoVgymjcJARO/XyfKcCLOQ=="], + "statuses": ["statuses@2.0.2", "https://npm-proxy.dev.databricks.com/statuses/-/statuses-2.0.2.tgz", {}, "sha512-DvEy55V3DB7uknRo+4iOGT5fP1slR8wQohVdknigZPMpMstaKJQWhwiYBACJE3Ul2pTnATihhBYnRhZQHGBiRw=="], - "statuses": ["statuses@2.0.2", "", {}, "sha512-DvEy55V3DB7uknRo+4iOGT5fP1slR8wQohVdknigZPMpMstaKJQWhwiYBACJE3Ul2pTnATihhBYnRhZQHGBiRw=="], + "toidentifier": ["toidentifier@1.0.1", "https://npm-proxy.dev.databricks.com/toidentifier/-/toidentifier-1.0.1.tgz", {}, "sha512-o5sSPKEkg/DIQNmH43V0/uerLrpzVedkUh8tGNvaeXpfpuwjKenlSox/2O/BTlZUtEe+JG7s5YhEz608PlAHRA=="], - "toidentifier": ["toidentifier@1.0.1", "", {}, "sha512-o5sSPKEkg/DIQNmH43V0/uerLrpzVedkUh8tGNvaeXpfpuwjKenlSox/2O/BTlZUtEe+JG7s5YhEz608PlAHRA=="], + "type-is": ["type-is@2.0.1", "https://npm-proxy.dev.databricks.com/type-is/-/type-is-2.0.1.tgz", { "dependencies": { "content-type": "^1.0.5", "media-typer": "^1.1.0", "mime-types": "^3.0.0" } }, "sha512-OZs6gsjF4vMp32qrCbiVSkrFmXtG/AZhY3t0iAMrMBiAZyV9oALtXO8hsrHbMXF9x6L3grlFuwW2oAz7cav+Gw=="], - "type-is": ["type-is@2.0.1", "", { "dependencies": { "content-type": "^1.0.5", "media-typer": "^1.1.0", "mime-types": "^3.0.0" } }, "sha512-OZs6gsjF4vMp32qrCbiVSkrFmXtG/AZhY3t0iAMrMBiAZyV9oALtXO8hsrHbMXF9x6L3grlFuwW2oAz7cav+Gw=="], + "undici-types": ["undici-types@6.20.0", "https://npm-proxy.dev.databricks.com/undici-types/-/undici-types-6.20.0.tgz", {}, 
"sha512-Ny6QZ2Nju20vw1SRHe3d9jVu6gJ+4e3+MMpqu7pqE5HT6WsTSlce++GQmK5UXS8mzV8DSYHrQH+Xrf2jVcuKNg=="], - "undici-types": ["undici-types@6.20.0", "", {}, "sha512-Ny6QZ2Nju20vw1SRHe3d9jVu6gJ+4e3+MMpqu7pqE5HT6WsTSlce++GQmK5UXS8mzV8DSYHrQH+Xrf2jVcuKNg=="], + "unpipe": ["unpipe@1.0.0", "https://npm-proxy.dev.databricks.com/unpipe/-/unpipe-1.0.0.tgz", {}, "sha512-pjy2bYhSsufwWlKwPc+l3cN7+wuJlK6uz0YdJEOlQDbl6jo/YlPi4mb8agUkVC8BF7V8NuzeyPNqRksA3hztKQ=="], - "unpipe": ["unpipe@1.0.0", "", {}, "sha512-pjy2bYhSsufwWlKwPc+l3cN7+wuJlK6uz0YdJEOlQDbl6jo/YlPi4mb8agUkVC8BF7V8NuzeyPNqRksA3hztKQ=="], + "url": ["url@0.10.3", "https://npm-proxy.dev.databricks.com/url/-/url-0.10.3.tgz", { "dependencies": { "punycode": "1.3.2", "querystring": "0.2.0" } }, "sha512-hzSUW2q06EqL1gKM/a+obYHLIO6ct2hwPuviqTTOcfFVc61UbfJ2Q32+uGL/HCPxKqrdGB5QUwIe7UqlDgwsOQ=="], - "url": ["url@0.10.3", "", { "dependencies": { "punycode": "1.3.2", "querystring": "0.2.0" } }, "sha512-hzSUW2q06EqL1gKM/a+obYHLIO6ct2hwPuviqTTOcfFVc61UbfJ2Q32+uGL/HCPxKqrdGB5QUwIe7UqlDgwsOQ=="], + "util": ["util@0.12.5", "https://npm-proxy.dev.databricks.com/util/-/util-0.12.5.tgz", { "dependencies": { "inherits": "^2.0.3", "is-arguments": "^1.0.4", "is-generator-function": "^1.0.7", "is-typed-array": "^1.1.3", "which-typed-array": "^1.1.2" } }, "sha512-kZf/K6hEIrWHI6XqOFUiiMa+79wE/D8Q+NCNAWclkyg3b4d2k7s0QGepNjiABc+aR3N1PAyHL7p6UcLY6LmrnA=="], - "util": ["util@0.12.5", "", { "dependencies": { "inherits": "^2.0.3", "is-arguments": "^1.0.4", "is-generator-function": "^1.0.7", "is-typed-array": "^1.1.3", "which-typed-array": "^1.1.2" } }, "sha512-kZf/K6hEIrWHI6XqOFUiiMa+79wE/D8Q+NCNAWclkyg3b4d2k7s0QGepNjiABc+aR3N1PAyHL7p6UcLY6LmrnA=="], + "uuid": ["uuid@8.0.0", "https://npm-proxy.dev.databricks.com/uuid/-/uuid-8.0.0.tgz", { "bin": { "uuid": "dist/bin/uuid" } }, "sha512-jOXGuXZAWdsTH7eZLtyXMqUb9EcWMGZNbL9YcGBJl4MH4nrxHmZJhEHvyLFrkxo+28uLb/NYRcStH48fnD0Vzw=="], - "uuid": ["uuid@8.0.0", "", { "bin": { "uuid": "dist/bin/uuid" } }, 
"sha512-jOXGuXZAWdsTH7eZLtyXMqUb9EcWMGZNbL9YcGBJl4MH4nrxHmZJhEHvyLFrkxo+28uLb/NYRcStH48fnD0Vzw=="], + "vary": ["vary@1.1.2", "https://npm-proxy.dev.databricks.com/vary/-/vary-1.1.2.tgz", {}, "sha512-BNGbWLfd0eUPabhkXUVm0j8uuvREyTh5ovRa/dyow/BqAbZJyC+5fU+IzQOzmAKzYqYRAISoRhdQr3eIZ/PXqg=="], - "vary": ["vary@1.1.2", "", {}, "sha512-BNGbWLfd0eUPabhkXUVm0j8uuvREyTh5ovRa/dyow/BqAbZJyC+5fU+IzQOzmAKzYqYRAISoRhdQr3eIZ/PXqg=="], + "web-streams-polyfill": ["web-streams-polyfill@3.3.3", "https://npm-proxy.dev.databricks.com/web-streams-polyfill/-/web-streams-polyfill-3.3.3.tgz", {}, "sha512-d2JWLCivmZYTSIoge9MsgFCZrt571BikcWGYkjC1khllbTeDlGqZ2D8vD8E/lJa8WGWbb7Plm8/XJYV7IJHZZw=="], - "which-typed-array": ["which-typed-array@1.1.19", "", { "dependencies": { "available-typed-arrays": "^1.0.7", "call-bind": "^1.0.8", "call-bound": "^1.0.4", "for-each": "^0.3.5", "get-proto": "^1.0.1", "gopd": "^1.2.0", "has-tostringtag": "^1.0.2" } }, "sha512-rEvr90Bck4WZt9HHFC4DJMsjvu7x+r6bImz0/BrbWb7A2djJ8hnZMrWnHo9F8ssv0OMErasDhftrfROTyqSDrw=="], + "which-typed-array": ["which-typed-array@1.1.19", "https://npm-proxy.dev.databricks.com/which-typed-array/-/which-typed-array-1.1.19.tgz", { "dependencies": { "available-typed-arrays": "^1.0.7", "call-bind": "^1.0.8", "call-bound": "^1.0.4", "for-each": "^0.3.5", "get-proto": "^1.0.1", "gopd": "^1.2.0", "has-tostringtag": "^1.0.2" } }, "sha512-rEvr90Bck4WZt9HHFC4DJMsjvu7x+r6bImz0/BrbWb7A2djJ8hnZMrWnHo9F8ssv0OMErasDhftrfROTyqSDrw=="], - "wrappy": ["wrappy@1.0.2", "", {}, "sha512-l4Sp/DRseor9wL6EvV2+TuQn63dMkPjZ/sp9XkghTEbV9KlPS1xUsZ3u7/IQO4wxtcFB4bgpQPRcR3QCvezPcQ=="], + "wrappy": ["wrappy@1.0.2", "https://npm-proxy.dev.databricks.com/wrappy/-/wrappy-1.0.2.tgz", {}, "sha512-l4Sp/DRseor9wL6EvV2+TuQn63dMkPjZ/sp9XkghTEbV9KlPS1xUsZ3u7/IQO4wxtcFB4bgpQPRcR3QCvezPcQ=="], - "xml2js": ["xml2js@0.6.2", "", { "dependencies": { "sax": ">=0.6.0", "xmlbuilder": "~11.0.0" } }, 
"sha512-T4rieHaC1EXcES0Kxxj4JWgaUQHDk+qwHcYOCFHfiwKz7tOVPLq7Hjq9dM1WCMhylqMEfP7hMcOIChvotiZegA=="], + "xml2js": ["xml2js@0.6.2", "https://npm-proxy.dev.databricks.com/xml2js/-/xml2js-0.6.2.tgz", { "dependencies": { "sax": ">=0.6.0", "xmlbuilder": "~11.0.0" } }, "sha512-T4rieHaC1EXcES0Kxxj4JWgaUQHDk+qwHcYOCFHfiwKz7tOVPLq7Hjq9dM1WCMhylqMEfP7hMcOIChvotiZegA=="], - "xmlbuilder": ["xmlbuilder@11.0.1", "", {}, "sha512-fDlsI/kFEx7gLvbecc0/ohLG50fugQp8ryHzMTuW9vSa1GJ0XYWKnhsUx7oie3G98+r56aTQIUB4kht42R3JvA=="], + "xmlbuilder": ["xmlbuilder@11.0.1", "https://npm-proxy.dev.databricks.com/xmlbuilder/-/xmlbuilder-11.0.1.tgz", {}, "sha512-fDlsI/kFEx7gLvbecc0/ohLG50fugQp8ryHzMTuW9vSa1GJ0XYWKnhsUx7oie3G98+r56aTQIUB4kht42R3JvA=="], - "yallist": ["yallist@4.0.0", "", {}, "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A=="], + "yallist": ["yallist@4.0.0", "https://npm-proxy.dev.databricks.com/yallist/-/yallist-4.0.0.tgz", {}, "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A=="], - "zod": ["zod@3.24.2", "", {}, "sha512-lY7CDW43ECgW9u1TcT3IoXHflywfVqDYze4waEz812jR/bZ8FHDsl7pFQoSZTz5N+2NqRXs8GBwnAwo3ZNxqhQ=="], + "zod": ["zod@3.24.2", "https://npm-proxy.dev.databricks.com/zod/-/zod-3.24.2.tgz", {}, "sha512-lY7CDW43ECgW9u1TcT3IoXHflywfVqDYze4waEz812jR/bZ8FHDsl7pFQoSZTz5N+2NqRXs8GBwnAwo3ZNxqhQ=="], - "zod-to-json-schema": ["zod-to-json-schema@3.24.3", "", { "peerDependencies": { "zod": "^3.24.1" } }, "sha512-HIAfWdYIt1sssHfYZFCXp4rU1w2r8hVVXYIlmoa0r0gABLs5di3RCqPU5DDROogVz1pAdYBaz7HK5n9pSUNs3A=="], + "zod-to-json-schema": ["zod-to-json-schema@3.24.3", "https://npm-proxy.dev.databricks.com/zod-to-json-schema/-/zod-to-json-schema-3.24.3.tgz", { "peerDependencies": { "zod": "^3.24.1" } }, "sha512-HIAfWdYIt1sssHfYZFCXp4rU1w2r8hVVXYIlmoa0r0gABLs5di3RCqPU5DDROogVz1pAdYBaz7HK5n9pSUNs3A=="], - "@models.dev/function/@cloudflare/workers-types": ["@cloudflare/workers-types@4.20250522.0", "", {}, 
"sha512-9RIffHobc35JWeddzBguGgPa4wLDr5x5F94+0/qy7LiV6pTBQ/M5qGEN9VA16IDT3EUpYI0WKh6VpcmeVEtVtw=="], + "@models.dev/function/@cloudflare/workers-types": ["@cloudflare/workers-types@4.20250522.0", "https://npm-proxy.dev.databricks.com/@cloudflare/workers-types/-/workers-types-4.20250522.0.tgz", {}, "sha512-9RIffHobc35JWeddzBguGgPa4wLDr5x5F94+0/qy7LiV6pTBQ/M5qGEN9VA16IDT3EUpYI0WKh6VpcmeVEtVtw=="], - "bun-types/@types/node": ["@types/node@24.0.3", "", { "dependencies": { "undici-types": "~7.8.0" } }, "sha512-R4I/kzCYAdRLzfiCabn9hxWfbuHS573x+r0dJMkkzThEa7pbrcDWK+9zu3e7aBOouf+rQAciqPFMnxwr0aWgKg=="], + "bun-types/@types/node": ["@types/node@24.0.3", "https://npm-proxy.dev.databricks.com/@types/node/-/node-24.0.3.tgz", { "dependencies": { "undici-types": "~7.8.0" } }, "sha512-R4I/kzCYAdRLzfiCabn9hxWfbuHS573x+r0dJMkkzThEa7pbrcDWK+9zu3e7aBOouf+rQAciqPFMnxwr0aWgKg=="], - "http-errors/statuses": ["statuses@2.0.1", "", {}, "sha512-RwNA9Z/7PrK06rYLIzFMlaF+l73iwpzsqRIFgbMLbTcLD6cOao82TaWefPXQvB2fOC4AjuYSEndS7N/mTCbkdQ=="], + "http-errors/statuses": ["statuses@2.0.1", "https://npm-proxy.dev.databricks.com/statuses/-/statuses-2.0.1.tgz", {}, "sha512-RwNA9Z/7PrK06rYLIzFMlaF+l73iwpzsqRIFgbMLbTcLD6cOao82TaWefPXQvB2fOC4AjuYSEndS7N/mTCbkdQ=="], - "models.dev/@types/bun": ["@types/bun@1.3.0", "", { "dependencies": { "bun-types": "1.3.0" } }, "sha512-+lAGCYjXjip2qY375xX/scJeVRmZ5cY0wyHYyCYxNcdEXrQ4AOe3gACgd4iQ8ksOslJtW4VNxBJ8llUwc3a6AA=="], + "models.dev/@types/bun": ["@types/bun@1.3.0", "https://npm-proxy.dev.databricks.com/@types/bun/-/bun-1.3.0.tgz", { "dependencies": { "bun-types": "1.3.0" } }, "sha512-+lAGCYjXjip2qY375xX/scJeVRmZ5cY0wyHYyCYxNcdEXrQ4AOe3gACgd4iQ8ksOslJtW4VNxBJ8llUwc3a6AA=="], - "opencontrol/@tsconfig/bun": ["@tsconfig/bun@1.0.7", "", {}, "sha512-udGrGJBNQdXGVulehc1aWT73wkR9wdaGBtB6yL70RJsqwW/yJhIg6ZbRlPOfIUiFNrnBuYLBi9CSmMKfDC7dvA=="], + "opencontrol/@tsconfig/bun": ["@tsconfig/bun@1.0.7", "https://npm-proxy.dev.databricks.com/@tsconfig/bun/-/bun-1.0.7.tgz", {}, 
"sha512-udGrGJBNQdXGVulehc1aWT73wkR9wdaGBtB6yL70RJsqwW/yJhIg6ZbRlPOfIUiFNrnBuYLBi9CSmMKfDC7dvA=="], - "opencontrol/hono": ["hono@4.7.4", "", {}, "sha512-Pst8FuGqz3L7tFF+u9Pu70eI0xa5S3LPUmrNd5Jm8nTHze9FxLTK9Kaj5g/k4UcwuJSXTP65SyHOPLrffpcAJg=="], + "opencontrol/hono": ["hono@4.7.4", "https://npm-proxy.dev.databricks.com/hono/-/hono-4.7.4.tgz", {}, "sha512-Pst8FuGqz3L7tFF+u9Pu70eI0xa5S3LPUmrNd5Jm8nTHze9FxLTK9Kaj5g/k4UcwuJSXTP65SyHOPLrffpcAJg=="], - "openid-client/jose": ["jose@4.15.9", "", {}, "sha512-1vUQX+IdDMVPj4k8kOxgUqlcK518yluMuGZwqlr44FS1ppZB/5GWh4rZG89erpOBOJjU/OBsnCVFfapsRz6nEA=="], + "openid-client/jose": ["jose@4.15.9", "https://npm-proxy.dev.databricks.com/jose/-/jose-4.15.9.tgz", {}, "sha512-1vUQX+IdDMVPj4k8kOxgUqlcK518yluMuGZwqlr44FS1ppZB/5GWh4rZG89erpOBOJjU/OBsnCVFfapsRz6nEA=="], - "bun-types/@types/node/undici-types": ["undici-types@7.8.0", "", {}, "sha512-9UJ2xGDvQ43tYyVMpuHlsgApydB8ZKfVYTsLDhXkFL/6gfkp+U8xTGdh8pMJv1SpZna0zxG1DwsKZsreLbXBxw=="], + "bun-types/@types/node/undici-types": ["undici-types@7.8.0", "https://npm-proxy.dev.databricks.com/undici-types/-/undici-types-7.8.0.tgz", {}, "sha512-9UJ2xGDvQ43tYyVMpuHlsgApydB8ZKfVYTsLDhXkFL/6gfkp+U8xTGdh8pMJv1SpZna0zxG1DwsKZsreLbXBxw=="], - "models.dev/@types/bun/bun-types": ["bun-types@1.3.0", "", { "dependencies": { "@types/node": "*" }, "peerDependencies": { "@types/react": "^19" } }, "sha512-u8X0thhx+yJ0KmkxuEo9HAtdfgCBaM/aI9K90VQcQioAmkVp3SG3FkwWGibUFz3WdXAdcsqOcbU40lK7tbHdkQ=="], + "models.dev/@types/bun/bun-types": ["bun-types@1.3.0", "https://npm-proxy.dev.databricks.com/bun-types/-/bun-types-1.3.0.tgz", { "dependencies": { "@types/node": "*" }, "peerDependencies": { "@types/react": "^19" } }, "sha512-u8X0thhx+yJ0KmkxuEo9HAtdfgCBaM/aI9K90VQcQioAmkVp3SG3FkwWGibUFz3WdXAdcsqOcbU40lK7tbHdkQ=="], - "models.dev/@types/bun/bun-types/@types/node": ["@types/node@24.0.3", "", { "dependencies": { "undici-types": "~7.8.0" } }, 
"sha512-R4I/kzCYAdRLzfiCabn9hxWfbuHS573x+r0dJMkkzThEa7pbrcDWK+9zu3e7aBOouf+rQAciqPFMnxwr0aWgKg=="], + "models.dev/@types/bun/bun-types/@types/node": ["@types/node@24.0.3", "https://npm-proxy.dev.databricks.com/@types/node/-/node-24.0.3.tgz", { "dependencies": { "undici-types": "~7.8.0" } }, "sha512-R4I/kzCYAdRLzfiCabn9hxWfbuHS573x+r0dJMkkzThEa7pbrcDWK+9zu3e7aBOouf+rQAciqPFMnxwr0aWgKg=="], - "models.dev/@types/bun/bun-types/@types/node/undici-types": ["undici-types@7.8.0", "", {}, "sha512-9UJ2xGDvQ43tYyVMpuHlsgApydB8ZKfVYTsLDhXkFL/6gfkp+U8xTGdh8pMJv1SpZna0zxG1DwsKZsreLbXBxw=="], + "models.dev/@types/bun/bun-types/@types/node/undici-types": ["undici-types@7.8.0", "https://npm-proxy.dev.databricks.com/undici-types/-/undici-types-7.8.0.tgz", {}, "sha512-9UJ2xGDvQ43tYyVMpuHlsgApydB8ZKfVYTsLDhXkFL/6gfkp+U8xTGdh8pMJv1SpZna0zxG1DwsKZsreLbXBxw=="], } } diff --git a/package.json b/package.json index 61800f8a7..6148df147 100644 --- a/package.json +++ b/package.json @@ -19,7 +19,10 @@ "helicone:generate": "bun ./packages/core/script/generate-helicone.ts", "venice:generate": "bun ./packages/core/script/generate-venice.ts", "vercel:generate": "bun ./packages/core/script/generate-vercel.ts", - "wandb:generate": "bun ./packages/core/script/generate-wandb.ts" + "wandb:generate": "bun ./packages/core/script/generate-wandb.ts", + "databricks:list-gateway": "bun ./packages/core/script/list-databricks-ai-gateway.ts", + "databricks:test-inference": "bun ./packages/core/script/test-databricks.ts", + "databricks:probe-capabilities": "bun ./packages/core/script/probe-databricks-capabilities.ts" }, "dependencies": { "@cloudflare/workers-types": "^4.20250801.0", diff --git a/packages/core/package.json b/packages/core/package.json index bb74eae36..d03b2aded 100644 --- a/packages/core/package.json +++ b/packages/core/package.json @@ -4,6 +4,7 @@ "$schema": "https://json.schemastore.org/package.json", "type": "module", "dependencies": { + "@databricks/sdk-experimental": "^0.16.0", "zod": 
"catalog:" }, "main": "./src/index.ts", diff --git a/packages/core/script/databricks-ai-gateway-shared.ts b/packages/core/script/databricks-ai-gateway-shared.ts new file mode 100644 index 000000000..ad8c3027c --- /dev/null +++ b/packages/core/script/databricks-ai-gateway-shared.ts @@ -0,0 +1,77 @@ +/** + * Shared types and helpers for Databricks AI Gateway discovery (system.ai FMA routes). + */ + +import { WorkspaceClient } from "@databricks/sdk-experimental"; + +export interface Destination { + name?: string; + type?: string; +} + +export interface Endpoint { + name?: string; + ai_gateway_url?: string; + config?: { destinations?: Destination[] }; +} + +export interface EndpointsResponse { + endpoints?: Endpoint[]; +} + +export interface FilteredGatewayRoute { + gateway_name: string; + system_ai_destinations: string[]; + ai_gateway_url?: string; +} + +export function isSystemAiFma(dest: Destination | undefined): boolean { + if (!dest) return false; + const t = dest.type ?? ""; + const name = dest.name ?? ""; + return t === "PAY_PER_TOKEN_FOUNDATION_MODEL" && name.startsWith("system.ai."); +} + +export function filterEndpoints(endpoints: Endpoint[]): FilteredGatewayRoute[] { + const out: FilteredGatewayRoute[] = []; + + for (const ep of endpoints) { + const name = ep.name ?? ""; + if (!name.startsWith("databricks-")) continue; + const dests = ep.config?.destinations ?? []; + const sysAi = dests + .filter(isSystemAiFma) + .map((d) => d.name!) 
+ .filter(Boolean); + if (sysAi.length === 0) continue; + out.push({ + gateway_name: name, + system_ai_destinations: sysAi, + ai_gateway_url: ep.ai_gateway_url, + }); + } + out.sort((a, b) => a.gateway_name.localeCompare(b.gateway_name)); + return out; +} + +export async function fetchFilteredGatewayRoutes( + client: WorkspaceClient, +): Promise { + const raw = (await client.apiClient.request( + { + path: "/api/ai-gateway/v2/endpoints", + method: "GET", + headers: new Headers(), + raw: false, + }, + undefined, + )) as EndpointsResponse; + + return filterEndpoints(raw.endpoints ?? []); +} + +/** OpenAI-compatible base URL for chat/embeddings (no trailing slash). */ +export function mlflowOpenAiBaseUrl(aiGatewayUrl: string): string { + const u = aiGatewayUrl.replace(/\/$/, ""); + return `${u}/mlflow/v1`; +} diff --git a/packages/core/script/list-databricks-ai-gateway.ts b/packages/core/script/list-databricks-ai-gateway.ts new file mode 100644 index 000000000..e1e0dc41c --- /dev/null +++ b/packages/core/script/list-databricks-ai-gateway.ts @@ -0,0 +1,63 @@ +#!/usr/bin/env bun +/** + * List Databricks AI Gateway routes aligned with Unity Catalog system.ai foundation models. + * + * Uses Databricks WorkspaceClient (JavaScript SDK; same auth patterns as ~/.databrickscfg, profiles, env). 
+ * + * Usage (from repo root): + * bun run databricks:list-gateway -- --profile YOUR_PROFILE + * bun run databricks:list-gateway -- --profile YOUR_PROFILE --json + */ + +import { WorkspaceClient } from "@databricks/sdk-experimental"; +import { fetchFilteredGatewayRoutes } from "./databricks-ai-gateway-shared.js"; + +function parseArgs() { + const argv = process.argv.slice(2); + let profile = process.env.DATABRICKS_CONFIG_PROFILE; + let json = false; + for (let i = 0; i < argv.length; i++) { + const a = argv[i]; + if (a === "--profile" && argv[i + 1]) { + profile = argv[++i]; + continue; + } + if (a === "--json") { + json = true; + continue; + } + if (a === "--help" || a === "-h") { + console.log(`Usage: list-databricks-ai-gateway.ts [--profile NAME] [--json] + + --profile Databricks config profile (~/.databrickscfg). Default: DATABRICKS_CONFIG_PROFILE or SDK default chain. + --json Print JSON array instead of TSV lines. +`); + process.exit(0); + } + } + return { profile, json }; +} + +async function main() { + const { profile, json } = parseArgs(); + + const client = new WorkspaceClient(profile ? 
{ profile } : {}); + const rows = await fetchFilteredGatewayRoutes(client); + + if (json) { + console.log(JSON.stringify(rows, null, 2)); + return; + } + + const hostUrl = (await client.apiClient.host).toString(); + console.log(`# host: ${hostUrl}`); + console.log(`# count: ${rows.length}\n`); + for (const r of rows) { + console.log(`${r.gateway_name}\t${r.system_ai_destinations.join(", ")}`); + } +} + +main().catch((e) => { + console.error(e); + process.exit(1); +}); diff --git a/packages/core/script/probe-databricks-capabilities.ts b/packages/core/script/probe-databricks-capabilities.ts new file mode 100644 index 000000000..753426fe0 --- /dev/null +++ b/packages/core/script/probe-databricks-capabilities.ts @@ -0,0 +1,1374 @@ +#!/usr/bin/env bun +/** + * probe-databricks-capabilities.ts + * + * PURPOSE + * ------- + * Live-probe every Databricks AI Gateway model and compare the observed + * capability signals against the catalog flags declared in + * providers/databricks/models/*.toml + * + * This is a READ-ONLY diagnostic tool. It never modifies TOML files. + * Mismatches are reported so a human can decide whether to update the catalog. + * + * OUTPUT + * ------ + * - Console: per-model pass/fail for each capability + a summary table at the end. + * - JSON file: databricks-capability-probe-.json written next to the + * providers/ folder. Contains the full raw row data for offline analysis. + * + * USAGE + * ----- + * bun ./packages/core/script/probe-databricks-capabilities.ts -- --profile YOUR_PROFILE + * bun run databricks:probe-capabilities -- --profile YOUR_PROFILE + * + * --profile Databricks CLI profile to use (reads host + token from ~/.databrickscfg). + * Defaults to DEFAULT if omitted. + * --delay Milliseconds to wait between API calls (default: 2000). + * Increase if you hit rate limits. 
+ * + * API SURFACES TESTED + * ------------------- + * chat mlflow/v1/chat/completions (OpenAI-compatible — GPT-5, Llama, Gemma, Qwen, …) + * anthropic /anthropic/v1/messages (Claude models) + * gemini /gemini/v1beta/… (Gemini models) + * responses /openai/v1/responses (OpenAI Responses API — Codex models) + * embedding mlflow/v1/embeddings (embedding-only models; skipped for all probes) + * + * CAPABILITY PROBES + * ----------------- + * - "tool" API accepted a tools payload with tool_choice:"required" and returned a + * tool call in the response. Using "required" (not "auto") avoids false + * negatives where the model answers in text instead of calling the tool. + * + * - "reasoning" Three-tier heuristic (any one hit = supported): + * 1. Response JSON contains reasoning-shaped keys + * (reasoning_content / thinking / thought / redacted_thinking / …) + * 2. usage.completion_tokens_details.reasoning_tokens > 0 + * (OpenAI-style internal reasoning — gpt-5-nano, o-series, etc.) + * 3. Response content contains ... blocks + * (OSS CoT models like Gemma; triggered via a system prompt) + * + * - "attachment" Model accepted an 8×8 solid-blue PNG as an inline base64 image_url + * and returned a successful text response. An 8×8 image is used because + * several backends reject 1×1 PNGs as "degenerate". + * + * - "pdf" Model accepted a minimal valid PDF as an inline attachment and returned + * a successful text response. Sent via the appropriate format per surface + * (image_url for mlflow/v1, document block for Anthropic, inlineData for + * Gemini, input_file for Responses API). + * + * - "temperature" Model accepted temperature=0.7 (non-default) without an error response. 
+ */ + +import path from "node:path"; +import { readFile, readdir } from "node:fs/promises"; +import { WorkspaceClient } from "@databricks/sdk-experimental"; +import { + fetchFilteredGatewayRoutes, + mlflowOpenAiBaseUrl, +} from "./databricks-ai-gateway-shared.js"; + +const MODELS_DIR = path.join( + import.meta.dir, + "../../../providers/databricks/models", +); + +type Kind = "chat" | "embedding" | "responses" | "anthropic" | "gemini"; + +/** + * Build a valid uncompressed 8×8 solid blue PNG entirely from raw bytes so we + * don't need an image library. The 1×1 PNG used previously was rejected as + * "degenerate" by several vision backends; 8×8 passes all of them. + * + * PNG structure: Signature · IHDR · IDAT (deflate-wrapped scanlines) · IEND + */ +function makePng8x8Blue(): string { + // CRC-32 helper (IEEE polynomial) + function crc32(buf: Uint8Array): number { + let c = 0xffffffff; + for (const b of buf) { + c ^= b; + for (let k = 0; k < 8; k++) c = c & 1 ? (c >>> 1) ^ 0xedb88320 : c >>> 1; + } + return (c ^ 0xffffffff) >>> 0; + } + function chunk(type: string, data: Uint8Array): Uint8Array { + const t = new TextEncoder().encode(type); + const len = new Uint8Array(4); + new DataView(len.buffer).setUint32(0, data.length, false); + const body = new Uint8Array(t.length + data.length); + body.set(t); + body.set(data, 4); + const crcVal = new Uint8Array(4); + new DataView(crcVal.buffer).setUint32(0, crc32(body), false); + const out = new Uint8Array(4 + 4 + data.length + 4); + out.set(len); + out.set(body, 4); + out.set(crcVal, 4 + 4 + data.length); + return out; + } + + // IHDR: 8×8, 8-bit RGB + const ihdrData = new Uint8Array(13); + const dv = new DataView(ihdrData.buffer); + dv.setUint32(0, 8, false); // width + dv.setUint32(4, 8, false); // height + ihdrData[8] = 8; // bit depth + ihdrData[9] = 2; // color type: RGB + ihdrData[10] = 0; // compression + ihdrData[11] = 0; // filter + ihdrData[12] = 0; // interlace + + // Raw scanlines: filter byte (0) + 8 pixels × 
3 bytes (R=0,G=0,B=255 blue) + const raw = new Uint8Array(8 * (1 + 8 * 3)); + for (let row = 0; row < 8; row++) { + raw[row * 25] = 0; // filter byte + for (let col = 0; col < 8; col++) { + raw[row * 25 + 1 + col * 3 + 2] = 255; // B channel + } + } + + // Minimal deflate: non-compressed block (BTYPE=00) + const deflated = new Uint8Array(2 + 5 + raw.length + 4); + deflated[0] = 0x78; deflated[1] = 0x01; // zlib header + deflated[2] = 0x01; // BFINAL=1, BTYPE=00 + deflated[3] = raw.length & 0xff; + deflated[4] = (raw.length >> 8) & 0xff; + deflated[5] = (~raw.length) & 0xff; + deflated[6] = ((~raw.length) >> 8) & 0xff; + deflated.set(raw, 7); + // Adler-32 checksum + let s1 = 1, s2 = 0; + for (const b of raw) { s1 = (s1 + b) % 65521; s2 = (s2 + s1) % 65521; } + const adler = (s2 << 16) | s1; + const dvD = new DataView(deflated.buffer); + dvD.setUint32(7 + raw.length, adler, false); + + const sig = new Uint8Array([137, 80, 78, 71, 13, 10, 26, 10]); + const ihdr = chunk("IHDR", ihdrData); + const idat = chunk("IDAT", deflated); + const iend = chunk("IEND", new Uint8Array(0)); + + const total = sig.length + ihdr.length + idat.length + iend.length; + const png = new Uint8Array(total); + let off = 0; + for (const part of [sig, ihdr, idat, iend]) { png.set(part, off); off += part.length; } + + return Buffer.from(png).toString("base64"); +} + +const TINY_PNG_B64 = makePng8x8Blue(); + +/** + * Build a minimal but structurally valid PDF (single page, "Hello" text). + * Enough for all vision backends to parse; we just need a non-error response. 
+ */ +function makeTinyPdf(): string { + const body = [ + "%PDF-1.4", + "1 0 obj<>endobj", + "2 0 obj<>endobj", + "3 0 obj<>>>>>", + ">>endobj", + "4 0 obj<>", + "stream", + "BT /F1 12 Tf 100 700 Td (Hello PDF) Tj ET", + "endstream", + "endobj", + "xref", + "0 5", + "0000000000 65535 f ", + ].join("\n"); + const offsets = [9]; // rough byte offsets – good enough for a probe + const xref = body.lastIndexOf("xref"); + const trailer = `trailer<>\nstartxref\n${xref}\n%%EOF`; + const full = body + "\n" + trailer; + return Buffer.from(full).toString("base64"); +} + +const TINY_PDF_B64 = makeTinyPdf(); + +interface CatalogFlags { + reasoning: boolean; + tool_call: boolean; + attachment: boolean; + pdf: boolean; + temperature: boolean; + modalities_input: string[]; + modalities_output: string[]; +} + +interface Row { + model: string; + kind: Kind; + catalog: CatalogFlags; + toolObserved: boolean; + toolError?: string; + reasoningObserved: boolean; + reasoningHint?: string; + reasoningError?: string; + attachmentObserved: boolean; + attachmentError?: string; + pdfObserved: boolean; + pdfError?: string; + temperatureObserved: boolean; + temperatureError?: string; +} + +function isOpenAiResponsesRoute(model: string): boolean { + return model.includes("-codex"); +} +function isClaudeGatewayModel(model: string): boolean { + return model.includes("claude"); +} +function isGeminiGatewayModel(model: string): boolean { + return model.includes("gemini"); +} + +async function loadEmbeddingModelIds(): Promise> { + const ids = new Set(); + const files = await readdir(MODELS_DIR); + for (const f of files) { + if (!f.endsWith(".toml")) continue; + const text = await readFile(path.join(MODELS_DIR, f), "utf8"); + if (/family\s*=\s*"text-embedding"/.test(text)) { + ids.add(f.replace(/\.toml$/, "")); + } + } + return ids; +} + +async function loadCatalogFlags(modelId: string): Promise { + const p = path.join(MODELS_DIR, `${modelId}.toml`); + const text = await readFile(p, "utf8"); + const 
reasoningM = text.match(/^reasoning\s*=\s*(true|false)\s*$/m); + const toolM = text.match(/^tool_call\s*=\s*(true|false)\s*$/m); + const attachM = text.match(/^attachment\s*=\s*(true|false)\s*$/m); + const inputM = text.match(/^input\s*=\s*\[([^\]]*)\]/m); + const outputM = text.match(/^output\s*=\s*\[([^\]]*)\]/m); + const parseModalities = (raw: string | undefined): string[] => { + if (!raw) return []; + return (raw[1] ?? "") + .split(",") + .map((s) => s.trim().replace(/"/g, "")) + .filter(Boolean); + }; + const tempM = text.match(/^temperature\s*=\s*(true|false)\s*$/m); + const inputModalities = parseModalities(inputM); + return { + reasoning: reasoningM?.[1] === "true", + tool_call: toolM?.[1] === "true", + attachment: attachM?.[1] === "true", + pdf: inputModalities.includes("pdf"), + temperature: tempM?.[1] === "true", + modalities_input: inputModalities, + modalities_output: parseModalities(outputM), + }; +} + +async function authHeaders(client: WorkspaceClient): Promise { + const h = new Headers(); + await client.config.authenticate(h); + return h; +} + +function openAiResponsesEndpoint(aiGatewayUrl: string): string { + return `${aiGatewayUrl.replace(/\/$/, "")}/openai/v1/responses`; +} +function anthropicMessagesUrl(aiGatewayUrl: string): string { + return `${aiGatewayUrl.replace(/\/$/, "")}/anthropic/v1/messages`; +} +function geminiGenerateUrl(aiGatewayUrl: string, model: string): string { + const base = aiGatewayUrl.replace(/\/$/, ""); + return `${base}/gemini/v1beta/models/${model}:generateContent`; +} + +const WEATHER_TOOL_OPENAI = { + type: "function", + function: { + name: "get_weather", + description: "Get weather for a city", + parameters: { + type: "object", + properties: { location: { type: "string", description: "City name" } }, + required: ["location"], + }, + }, +} as const; + +async function probeToolsChat( + mlflowBase: string, + model: string, + headers: Headers, +): Promise<{ ok: boolean; error?: string }> { + const h = new 
Headers(headers); + h.set("Content-Type", "application/json"); + const res = await fetch(`${mlflowBase}/chat/completions`, { + method: "POST", + headers: h, + body: JSON.stringify({ + model, + max_tokens: 512, + messages: [ + { + role: "user", + content: + "You MUST call the get_weather function with location='Tokyo'. Do not answer in text.", + }, + ], + tools: [WEATHER_TOOL_OPENAI], + tool_choice: "required", + }), + }); + const text = await res.text(); + if (!res.ok) return { ok: false, error: `${res.status} ${text.slice(0, 400)}` }; + try { + const j = JSON.parse(text) as { + choices?: Array<{ + message?: { tool_calls?: unknown[]; content?: unknown }; + }>; + }; + const tc = j.choices?.[0]?.message?.tool_calls; + if (Array.isArray(tc) && tc.length > 0) return { ok: true }; + return { ok: false, error: "no tool_calls in assistant message" }; + } catch { + return { ok: false, error: text.slice(0, 200) }; + } +} + +async function probeToolsAnthropic( + url: string, + model: string, + headers: Headers, +): Promise<{ ok: boolean; error?: string }> { + const h = new Headers(headers); + h.set("Content-Type", "application/json"); + h.set("anthropic-version", "2023-06-01"); + const res = await fetch(url, { + method: "POST", + headers: h, + body: JSON.stringify({ + model, + max_tokens: 512, + tools: [ + { + name: "get_weather", + description: "Get weather for a city", + input_schema: { + type: "object", + properties: { + location: { type: "string", description: "City name" }, + }, + required: ["location"], + }, + }, + ], + messages: [ + { + role: "user", + content: + "Use get_weather for Tokyo only. You must invoke the tool.", + }, + ], + }), + }); + const text = await res.text(); + if (!res.ok) return { ok: false, error: `${res.status} ${text.slice(0, 400)}` }; + try { + const j = JSON.parse(text) as { + content?: Array<{ type?: string }>; + stop_reason?: string; + }; + const types = (j.content ?? 
[]).map((c) => c.type); + if (types.includes("tool_use")) return { ok: true }; + if (j.stop_reason === "tool_use") return { ok: true }; + return { ok: false, error: `stop_reason=${j.stop_reason} types=${types.join(",")}` }; + } catch { + return { ok: false, error: text.slice(0, 200) }; + } +} + +async function probeToolsGemini( + url: string, + headers: Headers, +): Promise<{ ok: boolean; error?: string }> { + const h = new Headers(headers); + h.set("Content-Type", "application/json"); + const res = await fetch(url, { + method: "POST", + headers: h, + body: JSON.stringify({ + contents: [ + { + role: "user", + parts: [{ text: "Call get_weather with location Tokyo. Use the function." }], + }, + ], + tools: [ + { + functionDeclarations: [ + { + name: "get_weather", + description: "Get weather for a city", + parameters: { + type: "object", + properties: { + location: { type: "string" }, + }, + required: ["location"], + }, + }, + ], + }, + ], + toolConfig: { functionCallingConfig: { mode: "AUTO" } }, + generationConfig: { maxOutputTokens: 512 }, + }), + }); + const text = await res.text(); + if (!res.ok) return { ok: false, error: `${res.status} ${text.slice(0, 400)}` }; + try { + const j = JSON.parse(text) as { + candidates?: Array<{ + content?: { parts?: Array<{ functionCall?: unknown }> }; + }>; + }; + const parts = j.candidates?.[0]?.content?.parts ?? 
[]; + if (parts.some((p) => p.functionCall)) return { ok: true }; + return { ok: false, error: "no functionCall in parts" }; + } catch { + return { ok: false, error: text.slice(0, 200) }; + } +} + +async function probeToolsResponses( + responsesUrl: string, + model: string, + headers: Headers, +): Promise<{ ok: boolean; error?: string }> { + const h = new Headers(headers); + h.set("Content-Type", "application/json"); + const res = await fetch(responsesUrl, { + method: "POST", + headers: h, + body: JSON.stringify({ + model, + input: [ + { + type: "message", + role: "user", + content: + "Use tool get_weather with location Tokyo. You must call the function.", + }, + ], + tools: [ + { + type: "function", + name: "get_weather", + description: "Get weather for a city", + parameters: { + type: "object", + properties: { location: { type: "string" } }, + required: ["location"], + }, + }, + ], + tool_choice: "auto", + max_output_tokens: 512, + }), + }); + const text = await res.text(); + if (!res.ok) return { ok: false, error: `${res.status} ${text.slice(0, 400)}` }; + try { + const j = JSON.parse(text) as { output?: Array<{ type?: string }> }; + const types = (j.output ?? 
[]).map((o) => o.type); + if (types.some((t) => t?.includes("function") || t === "function_call")) + return { ok: true }; + if (text.includes("function_call") || text.includes("tool_calls")) + return { ok: true }; + return { ok: false, error: `output types: ${types.join(",")}` }; + } catch { + return { ok: false, error: text.slice(0, 200) }; + } +} + +function responseHasReasoningShape(obj: unknown): { hit: boolean; hint: string } { + const s = JSON.stringify(obj); + if ( + s.includes('"reasoning_content"') || + s.includes('"reasoning"') || + s.includes('"thinking"') || + s.includes('"type":"reasoning"') || + s.includes('"summary_text"') || + s.includes('"thought"') || + s.includes("thoughtSignature") || + s.includes("redacted_thinking") + ) { + return { hit: true, hint: "json contains reasoning/thinking-like keys" }; + } + return { hit: false, hint: "" }; +} + +async function probeReasoningChat( + mlflowBase: string, + model: string, + headers: Headers, +): Promise<{ ok: boolean; hint?: string; error?: string }> { + const h = new Headers(headers); + h.set("Content-Type", "application/json"); + + const isOpenAiStyle = /gpt-5|gpt-oss|o4|o3|o1/i.test(model); + // OSS models (Gemma, Llama, Qwen, etc.) that support CoT output blocks + const isOssReasoner = /gemma|qwen|deepseek|phi/i.test(model); + + const body: Record = { + model, + max_tokens: 512, + messages: [ + ...(isOssReasoner + ? [ + { + role: "system", + content: + "You have a thinking/reasoning mode. Before answering, output your " + + "internal reasoning inside ... 
tags, then give the answer.", + }, + ] + : []), + { + role: "user", + content: "Think step by step briefly, then answer: what is 2+2?", + }, + ], + }; + + // OpenAI-style reasoning models accept reasoning_effort + if (isOpenAiStyle) { + body.reasoning_effort = "low"; + } + + const res = await fetch(`${mlflowBase}/chat/completions`, { + method: "POST", + headers: h, + body: JSON.stringify(body), + }); + const text = await res.text(); + + if (!res.ok) { + // Retry without reasoning_effort in case the model rejects it + const retry = await fetch(`${mlflowBase}/chat/completions`, { + method: "POST", + headers: h, + body: JSON.stringify({ + model, + max_tokens: 512, + messages: body.messages as [], + }), + }); + const t2 = await retry.text(); + if (!retry.ok) return { ok: false, error: `${res.status} then ${retry.status}` }; + try { + const j = JSON.parse(t2) as unknown; + const { hit, hint } = responseHasReasoningShape(j); + return { ok: hit, hint: hit ? hint : "no reasoning-shaped fields" }; + } catch { + return { ok: false, error: t2.slice(0, 200) }; + } + } + + try { + const j = JSON.parse(text) as { + choices?: Array<{ message?: { content?: string } }>; + usage?: { completion_tokens_details?: { reasoning_tokens?: number } }; + }; + + // Primary: explicit reasoning-shaped fields in the response JSON + const { hit, hint } = responseHasReasoningShape(j); + if (hit) return { ok: true, hint }; + + // Secondary: OpenAI internal reasoning — usage.completion_tokens_details.reasoning_tokens > 0 + const reasoningTokens = j.usage?.completion_tokens_details?.reasoning_tokens ?? 0; + if (reasoningTokens > 0) { + return { ok: true, hint: `internal reasoning (${reasoningTokens} reasoning_tokens in usage)` }; + } + + // Tertiary: OSS blocks visible in response content + const content = j.choices?.[0]?.message?.content ?? 
""; + if (isOssReasoner && content.includes("")) { + return { ok: true, hint: " block in response content" }; + } + + return { ok: false, hint: "no reasoning-shaped fields" }; + } catch { + return { ok: false, error: text.slice(0, 200) }; + } +} + +async function probeReasoningAnthropic( + url: string, + model: string, + headers: Headers, +): Promise<{ ok: boolean; hint?: string; error?: string }> { + const h = new Headers(headers); + h.set("Content-Type", "application/json"); + h.set("anthropic-version", "2023-06-01"); + h.set( + "anthropic-beta", + "interleaved-thinking-2025-05-14", + ); + const res = await fetch(url, { + method: "POST", + headers: h, + body: JSON.stringify({ + model, + max_tokens: 12000, + thinking: { type: "enabled", budget_tokens: 8000 }, + messages: [ + { + role: "user", + content: "What is 7*6? Think briefly then answer with one number.", + }, + ], + }), + }); + const text = await res.text(); + if (!res.ok) { + return { ok: false, error: `${res.status} ${text.slice(0, 300)}` }; + } + try { + const j = JSON.parse(text) as { + content?: Array<{ type?: string }>; + }; + const types = (j.content ?? []).map((c) => c.type); + if (types.includes("thinking")) return { ok: true, hint: "thinking blocks" }; + const { hit, hint } = responseHasReasoningShape(j); + return { ok: hit, hint: hint || "parsed" }; + } catch { + return { ok: false, error: text.slice(0, 200) }; + } +} + +async function probeReasoningGemini( + url: string, + headers: Headers, +): Promise<{ ok: boolean; hint?: string; error?: string }> { + const h = new Headers(headers); + h.set("Content-Type", "application/json"); + const res = await fetch(url, { + method: "POST", + headers: h, + body: JSON.stringify({ + contents: [ + { + role: "user", + parts: [{ text: "What is 3+5? Answer with one digit only after thinking." 
}], + }, + ], + generationConfig: { + maxOutputTokens: 512, + thinkingConfig: { thinkingBudget: 1024, includeThoughts: true }, + }, + }), + }); + const text = await res.text(); + if (!res.ok) { + return { ok: false, error: `${res.status} ${text.slice(0, 300)}` }; + } + try { + const j = JSON.parse(text) as unknown; + const { hit, hint } = responseHasReasoningShape(j); + if (hit) return { ok: true, hint }; + return { ok: false, hint: "no thinking/reasoning fields" }; + } catch { + return { ok: false, error: text.slice(0, 200) }; + } +} + +async function probeReasoningResponses( + responsesUrl: string, + model: string, + headers: Headers, +): Promise<{ ok: boolean; hint?: string; error?: string }> { + const h = new Headers(headers); + h.set("Content-Type", "application/json"); + const res = await fetch(responsesUrl, { + method: "POST", + headers: h, + body: JSON.stringify({ + model, + input: "What is 2+3? One number only.", + reasoning: { effort: "low" }, + max_output_tokens: 512, + }), + }); + const text = await res.text(); + if (!res.ok) { + const res2 = await fetch(responsesUrl, { + method: "POST", + headers: h, + body: JSON.stringify({ + model, + input: "What is 2+3? One number only.", + max_output_tokens: 512, + }), + }); + const t2 = await res2.text(); + if (!res2.ok) return { ok: false, error: `${res.status} / ${res2.status}` }; + try { + const j = JSON.parse(t2) as unknown; + const { hit, hint } = responseHasReasoningShape(j); + return { ok: hit, hint: hit ? hint : "minimal" }; + } catch { + return { ok: false, error: t2.slice(0, 200) }; + } + } + try { + const j = JSON.parse(text) as unknown; + const { hit, hint } = responseHasReasoningShape(j); + return { ok: hit, hint: hit ? 
hint : "responses ok" }; + } catch { + return { ok: false, error: text.slice(0, 200) }; + } +} + +// ── Attachment probes (send a tiny inline PNG image) ────────────────────────── + +async function probeAttachmentChat( + mlflowBase: string, + model: string, + headers: Headers, +): Promise<{ ok: boolean; error?: string }> { + const h = new Headers(headers); + h.set("Content-Type", "application/json"); + const res = await fetch(`${mlflowBase}/chat/completions`, { + method: "POST", + headers: h, + body: JSON.stringify({ + model, + max_tokens: 64, + messages: [ + { + role: "user", + content: [ + { + type: "image_url", + image_url: { url: `data:image/png;base64,${TINY_PNG_B64}` }, + }, + { type: "text", text: "What color is this image? One word." }, + ], + }, + ], + }), + }); + const text = await res.text(); + if (!res.ok) return { ok: false, error: `${res.status} ${text.slice(0, 300)}` }; + return { ok: true }; +} + +async function probeAttachmentAnthropic( + url: string, + model: string, + headers: Headers, +): Promise<{ ok: boolean; error?: string }> { + const h = new Headers(headers); + h.set("Content-Type", "application/json"); + h.set("anthropic-version", "2023-06-01"); + const res = await fetch(url, { + method: "POST", + headers: h, + body: JSON.stringify({ + model, + max_tokens: 64, + messages: [ + { + role: "user", + content: [ + { + type: "image", + source: { + type: "base64", + media_type: "image/png", + data: TINY_PNG_B64, + }, + }, + { type: "text", text: "What color is this image? One word." 
}, + ], + }, + ], + }), + }); + const text = await res.text(); + if (!res.ok) return { ok: false, error: `${res.status} ${text.slice(0, 300)}` }; + return { ok: true }; +} + +async function probeAttachmentGemini( + url: string, + headers: Headers, +): Promise<{ ok: boolean; error?: string }> { + const h = new Headers(headers); + h.set("Content-Type", "application/json"); + const res = await fetch(url, { + method: "POST", + headers: h, + body: JSON.stringify({ + contents: [ + { + role: "user", + parts: [ + { + inlineData: { mimeType: "image/png", data: TINY_PNG_B64 }, + }, + { text: "What color is this image? One word." }, + ], + }, + ], + generationConfig: { maxOutputTokens: 64 }, + }), + }); + const text = await res.text(); + if (!res.ok) return { ok: false, error: `${res.status} ${text.slice(0, 300)}` }; + return { ok: true }; +} + +async function probeAttachmentResponses( + responsesUrl: string, + model: string, + headers: Headers, +): Promise<{ ok: boolean; error?: string }> { + const h = new Headers(headers); + h.set("Content-Type", "application/json"); + const res = await fetch(responsesUrl, { + method: "POST", + headers: h, + body: JSON.stringify({ + model, + max_output_tokens: 64, + input: [ + { + type: "message", + role: "user", + content: [ + { + type: "input_image", + image_url: `data:image/png;base64,${TINY_PNG_B64}`, + }, + { type: "input_text", text: "What color is this image? One word." 
}, + ], + }, + ], + }), + }); + const text = await res.text(); + if (!res.ok) return { ok: false, error: `${res.status} ${text.slice(0, 300)}` }; + return { ok: true }; +} + +// ── PDF probes (send a minimal PDF document) ────────────────────────────────── + +async function probePdfChat( + mlflowBase: string, + model: string, + headers: Headers, +): Promise<{ ok: boolean; error?: string }> { + const h = new Headers(headers); + h.set("Content-Type", "application/json"); + const res = await fetch(`${mlflowBase}/chat/completions`, { + method: "POST", + headers: h, + body: JSON.stringify({ + model, + max_tokens: 64, + messages: [ + { + role: "user", + content: [ + { + type: "image_url", + image_url: { url: `data:application/pdf;base64,${TINY_PDF_B64}` }, + }, + { type: "text", text: "What does this document say? One sentence." }, + ], + }, + ], + }), + }); + const text = await res.text(); + if (!res.ok) return { ok: false, error: `${res.status} ${text.slice(0, 300)}` }; + return { ok: true }; +} + +async function probePdfAnthropic( + url: string, + model: string, + headers: Headers, +): Promise<{ ok: boolean; error?: string }> { + const h = new Headers(headers); + h.set("Content-Type", "application/json"); + h.set("anthropic-version", "2023-06-01"); + const res = await fetch(url, { + method: "POST", + headers: h, + body: JSON.stringify({ + model, + max_tokens: 64, + messages: [ + { + role: "user", + content: [ + { + type: "document", + source: { + type: "base64", + media_type: "application/pdf", + data: TINY_PDF_B64, + }, + }, + { type: "text", text: "What does this document say? One sentence." 
}, + ], + }, + ], + }), + }); + const text = await res.text(); + if (!res.ok) return { ok: false, error: `${res.status} ${text.slice(0, 300)}` }; + return { ok: true }; +} + +async function probePdfGemini( + url: string, + headers: Headers, +): Promise<{ ok: boolean; error?: string }> { + const h = new Headers(headers); + h.set("Content-Type", "application/json"); + const res = await fetch(url, { + method: "POST", + headers: h, + body: JSON.stringify({ + contents: [ + { + role: "user", + parts: [ + { + inlineData: { mimeType: "application/pdf", data: TINY_PDF_B64 }, + }, + { text: "What does this document say? One sentence." }, + ], + }, + ], + generationConfig: { maxOutputTokens: 64 }, + }), + }); + const text = await res.text(); + if (!res.ok) return { ok: false, error: `${res.status} ${text.slice(0, 300)}` }; + return { ok: true }; +} + +async function probePdfResponses( + responsesUrl: string, + model: string, + headers: Headers, +): Promise<{ ok: boolean; error?: string }> { + const h = new Headers(headers); + h.set("Content-Type", "application/json"); + const res = await fetch(responsesUrl, { + method: "POST", + headers: h, + body: JSON.stringify({ + model, + max_output_tokens: 64, + input: [ + { + type: "message", + role: "user", + content: [ + { + type: "input_file", + filename: "doc.pdf", + file_data: `data:application/pdf;base64,${TINY_PDF_B64}`, + }, + { type: "input_text", text: "What does this document say? One sentence." 
}, + ], + }, + ], + }), + }); + const text = await res.text(); + if (!res.ok) return { ok: false, error: `${res.status} ${text.slice(0, 300)}` }; + return { ok: true }; +} + +// ── Temperature probes (send temperature=0.7; only default=1 or unsupported → false) ── + +async function probeTempChat( + mlflowBase: string, + model: string, + headers: Headers, +): Promise<{ ok: boolean; error?: string }> { + const h = new Headers(headers); + h.set("Content-Type", "application/json"); + const res = await fetch(`${mlflowBase}/chat/completions`, { + method: "POST", + headers: h, + body: JSON.stringify({ + model, + max_tokens: 32, + temperature: 0.7, + messages: [{ role: "user", content: "Say: hello" }], + }), + }); + const text = await res.text(); + if (!res.ok) return { ok: false, error: `${res.status} ${text.slice(0, 300)}` }; + return { ok: true }; +} + +async function probeTempAnthropic( + url: string, + model: string, + headers: Headers, +): Promise<{ ok: boolean; error?: string }> { + const h = new Headers(headers); + h.set("Content-Type", "application/json"); + h.set("anthropic-version", "2023-06-01"); + const res = await fetch(url, { + method: "POST", + headers: h, + body: JSON.stringify({ + model, + max_tokens: 32, + temperature: 0.7, + messages: [{ role: "user", content: "Say: hello" }], + }), + }); + const text = await res.text(); + if (!res.ok) return { ok: false, error: `${res.status} ${text.slice(0, 300)}` }; + return { ok: true }; +} + +async function probeTempGemini( + url: string, + headers: Headers, +): Promise<{ ok: boolean; error?: string }> { + const h = new Headers(headers); + h.set("Content-Type", "application/json"); + const res = await fetch(url, { + method: "POST", + headers: h, + body: JSON.stringify({ + contents: [{ role: "user", parts: [{ text: "Say: hello" }] }], + generationConfig: { maxOutputTokens: 32, temperature: 0.7 }, + }), + }); + const text = await res.text(); + if (!res.ok) return { ok: false, error: `${res.status} ${text.slice(0, 
300)}` }; + return { ok: true }; +} + +async function probeTempResponses( + responsesUrl: string, + model: string, + headers: Headers, +): Promise<{ ok: boolean; error?: string }> { + const h = new Headers(headers); + h.set("Content-Type", "application/json"); + const res = await fetch(responsesUrl, { + method: "POST", + headers: h, + body: JSON.stringify({ + model, + max_output_tokens: 32, + temperature: 0.7, + input: "Say: hello", + }), + }); + const text = await res.text(); + if (!res.ok) return { ok: false, error: `${res.status} ${text.slice(0, 300)}` }; + return { ok: true }; +} + +// ────────────────────────────────────────────────────────────────────────────── + +function parseArgs() { + const argv = process.argv.slice(2); + let profile = process.env.DATABRICKS_CONFIG_PROFILE; + let delayMs = 300; + for (let i = 0; i < argv.length; i++) { + const a = argv[i]; + if (a === "--profile" && argv[i + 1]) profile = argv[++i]; + if (a === "--delay-ms" && argv[i + 1]) delayMs = Number(argv[++i]); + } + return { profile, delayMs }; +} + +function sleep(ms: number) { + return new Promise((r) => setTimeout(r, ms)); +} + +async function main() { + const { profile, delayMs } = parseArgs(); + const client = new WorkspaceClient(profile ? { profile } : {}); + const routes = await fetchFilteredGatewayRoutes(client); + const gatewayUrl = routes[0]?.ai_gateway_url; + if (!gatewayUrl) throw new Error("No AI Gateway URL"); + const mlflowBase = mlflowOpenAiBaseUrl(gatewayUrl); + const responsesUrl = openAiResponsesEndpoint(gatewayUrl); + const anthropicUrl = anthropicMessagesUrl(gatewayUrl); + const headers = await authHeaders(client); + const embeddingIds = await loadEmbeddingModelIds(); + + const rows: Row[] = []; + for (const r of routes) { + const model = r.gateway_name; + const kind: Kind = embeddingIds.has(model) + ? "embedding" + : isOpenAiResponsesRoute(model) + ? "responses" + : isClaudeGatewayModel(model) + ? "anthropic" + : isGeminiGatewayModel(model) + ? 
"gemini" + : "chat"; + const catalog = await loadCatalogFlags(model); + + if (kind === "embedding") { + rows.push({ + model, + kind, + catalog, + toolObserved: false, + reasoningObserved: false, + reasoningHint: "skipped (embedding)", + attachmentObserved: false, + attachmentError: "skipped (embedding)", + pdfObserved: false, + pdfError: "skipped (embedding)", + temperatureObserved: false, + temperatureError: "skipped (embedding)", + }); + continue; + } + + process.stdout.write(`${model} (${kind}) ... `); + let toolObserved = false; + let toolError: string | undefined; + let reasoningObserved = false; + let reasoningHint: string | undefined; + let reasoningError: string | undefined; + let attachmentObserved = false; + let attachmentError: string | undefined; + let pdfObserved = false; + let pdfError: string | undefined; + let temperatureObserved = false; + let temperatureError: string | undefined; + + if (kind === "chat") { + const t = await probeToolsChat(mlflowBase, model, headers); + toolObserved = t.ok; toolError = t.error; + const rr = await probeReasoningChat(mlflowBase, model, headers); + reasoningObserved = rr.ok; reasoningHint = rr.hint; reasoningError = rr.error; + const at = await probeAttachmentChat(mlflowBase, model, headers); + attachmentObserved = at.ok; attachmentError = at.error; + const pd = await probePdfChat(mlflowBase, model, headers); + pdfObserved = pd.ok; pdfError = pd.error; + const tm = await probeTempChat(mlflowBase, model, headers); + temperatureObserved = tm.ok; temperatureError = tm.error; + } else if (kind === "anthropic") { + const t = await probeToolsAnthropic(anthropicUrl, model, headers); + toolObserved = t.ok; toolError = t.error; + const rr = await probeReasoningAnthropic(anthropicUrl, model, headers); + reasoningObserved = rr.ok; reasoningHint = rr.hint; reasoningError = rr.error; + const at = await probeAttachmentAnthropic(anthropicUrl, model, headers); + attachmentObserved = at.ok; attachmentError = at.error; + const pd = 
await probePdfAnthropic(anthropicUrl, model, headers); + pdfObserved = pd.ok; pdfError = pd.error; + const tm = await probeTempAnthropic(anthropicUrl, model, headers); + temperatureObserved = tm.ok; temperatureError = tm.error; + } else if (kind === "gemini") { + const url = geminiGenerateUrl(gatewayUrl, model); + const t = await probeToolsGemini(url, headers); + toolObserved = t.ok; toolError = t.error; + const rr = await probeReasoningGemini(url, headers); + reasoningObserved = rr.ok; reasoningHint = rr.hint; reasoningError = rr.error; + const at = await probeAttachmentGemini(url, headers); + attachmentObserved = at.ok; attachmentError = at.error; + const pd = await probePdfGemini(url, headers); + pdfObserved = pd.ok; pdfError = pd.error; + const tm = await probeTempGemini(url, headers); + temperatureObserved = tm.ok; temperatureError = tm.error; + } else { + const t = await probeToolsResponses(responsesUrl, model, headers); + toolObserved = t.ok; toolError = t.error; + const rr = await probeReasoningResponses(responsesUrl, model, headers); + reasoningObserved = rr.ok; reasoningHint = rr.hint; reasoningError = rr.error; + const at = await probeAttachmentResponses(responsesUrl, model, headers); + attachmentObserved = at.ok; attachmentError = at.error; + const pd = await probePdfResponses(responsesUrl, model, headers); + pdfObserved = pd.ok; pdfError = pd.error; + const tm = await probeTempResponses(responsesUrl, model, headers); + temperatureObserved = tm.ok; temperatureError = tm.error; + } + + // Policy rule: all gpt-5* models (any variant) report temperature=false on the + // Databricks AI Gateway regardless of what the raw probe observed. The gateway + // either rejects non-default values outright or constrains them to 1. + if (/^databricks-gpt-5/i.test(model)) { + temperatureObserved = false; + temperatureError = temperatureError ?? 
"policy: gpt-5* temperature always false on Databricks gateway"; + } + + rows.push({ + model, kind, catalog, + toolObserved, toolError, + reasoningObserved, reasoningHint, reasoningError, + attachmentObserved, attachmentError, + pdfObserved, pdfError, + temperatureObserved, temperatureError, + }); + console.log( + `tools=${toolObserved} reasoning=${reasoningObserved} attachment=${attachmentObserved}` + + ` pdf=${pdfObserved} temp=${temperatureObserved}` + + ` cat(r/t/a/p/T)=${catalog.reasoning}/${catalog.tool_call}/${catalog.attachment}/${catalog.pdf}/${catalog.temperature}`, + ); + if (delayMs > 0) await sleep(delayMs); + } + + const outPath = path.join( + import.meta.dir, + `../../../databricks-capability-probe-${Date.now()}.json`, + ); + await Bun.write(outPath, JSON.stringify(rows, null, 2)); + + console.log(`\nWrote ${outPath}`); + + const toolMismatch = rows.filter( + (r) => r.kind !== "embedding" && r.toolObserved !== r.catalog.tool_call, + ); + const reasonMismatch = rows.filter( + (r) => r.kind !== "embedding" && r.reasoningObserved !== r.catalog.reasoning, + ); + const attachMismatch = rows.filter( + (r) => r.kind !== "embedding" && r.attachmentObserved !== r.catalog.attachment, + ); + const pdfMismatch = rows.filter( + (r) => r.kind !== "embedding" && r.pdfObserved !== r.catalog.pdf, + ); + const tempMismatch = rows.filter( + (r) => r.kind !== "embedding" && r.temperatureObserved !== r.catalog.temperature, + ); + + console.log("\n--- Mismatches vs catalog (observed !== TOML) ---"); + console.log("tool_call: ", toolMismatch.length ? toolMismatch.map((r) => r.model) : "none"); + console.log("reasoning: ", reasonMismatch.length ? reasonMismatch.map((r) => r.model) : "none"); + console.log("attachment: ", attachMismatch.length ? attachMismatch.map((r) => r.model) : "none"); + console.log("pdf: ", pdfMismatch.length ? pdfMismatch.map((r) => r.model) : "none"); + console.log("temperature:", tempMismatch.length ? 
tempMismatch.map((r) => r.model) : "none"); + + console.log("\n--- attachment probe results ---"); + for (const r of rows) { + if (r.kind === "embedding") continue; + const mark = r.attachmentObserved ? "✓" : "✗"; + const catMark = r.catalog.attachment ? "cat=true" : "cat=false"; + const mismatch = r.attachmentObserved !== r.catalog.attachment ? " ← MISMATCH" : ""; + console.log(` ${mark} ${r.model} (${r.kind}) ${catMark}${mismatch}`); + if (r.attachmentError && !r.attachmentObserved) + console.log(` err: ${r.attachmentError.slice(0, 160)}`); + } + + console.log("\n--- pdf probe results ---"); + for (const r of rows) { + if (r.kind === "embedding") continue; + const mark = r.pdfObserved ? "✓" : "✗"; + const catMark = r.catalog.pdf ? "cat=true" : "cat=false"; + const mismatch = r.pdfObserved !== r.catalog.pdf ? " ← MISMATCH" : ""; + console.log(` ${mark} ${r.model} (${r.kind}) ${catMark}${mismatch}`); + if (r.pdfError && !r.pdfObserved) + console.log(` err: ${r.pdfError.slice(0, 160)}`); + } + + for (const r of rows) { + if (r.kind === "embedding") continue; + if (r.toolError && !r.toolObserved) + console.log(` ${r.model} tool err: ${r.toolError.slice(0, 120)}`); + if (r.reasoningError && !r.reasoningObserved) + console.log(` ${r.model} reasoning err: ${r.reasoningError.slice(0, 120)}`); + } + + console.log("\n--- temperature probe results ---"); + for (const r of rows) { + if (r.kind === "embedding") continue; + const mark = r.temperatureObserved ? "✓" : "✗"; + const catMark = r.catalog.temperature ? "cat=true" : "cat=false"; + const mismatch = r.temperatureObserved !== r.catalog.temperature ? 
" ← MISMATCH" : ""; + console.log(` ${mark} ${r.model} (${r.kind}) ${catMark}${mismatch}`); + if (r.temperatureError && !r.temperatureObserved) + console.log(` err: ${r.temperatureError.slice(0, 160)}`); + } + + // ── Consolidated summary table ──────────────────────────────────────────── + const col = { + model: 38, + kind: 9, + tool: 5, + reason: 9, + attach: 7, + pdf: 5, + temp: 5, + input: 22, + }; + function pad(s: string, n: number) { return s.slice(0, n).padEnd(n); } + function yn(observed: boolean, catalog: boolean): string { + const v = observed ? "✓" : "✗"; + return observed !== catalog ? `${v}*` : ` ${v}`; // * = mismatch vs catalog + } + const totalWidth = col.model + col.kind + col.tool + col.reason + col.attach + col.pdf + col.temp + col.input + 16; + + console.log("\n" + "─".repeat(totalWidth)); + console.log( + pad("Model", col.model) + " " + + pad("Surface", col.kind) + " " + + pad("Tools", col.tool) + " " + + pad("Reasoning", col.reason) + " " + + pad("Image", col.attach) + " " + + pad("PDF", col.pdf) + " " + + pad("Temp", col.temp) + " " + + pad("Input modalities", col.input), + ); + console.log("─".repeat(totalWidth)); + + for (const r of rows) { + const input = r.catalog.modalities_input.join(" · ") || "text"; + if (r.kind === "embedding") { + console.log( + pad(r.model, col.model) + " " + + pad(r.kind, col.kind) + " " + + pad(" —", col.tool) + " " + + pad(" —", col.reason) + " " + + pad(" —", col.attach) + " " + + pad(" —", col.pdf) + " " + + pad(" —", col.temp) + " " + + input, + ); + } else { + console.log( + pad(r.model, col.model) + " " + + pad(r.kind, col.kind) + " " + + pad(yn(r.toolObserved, r.catalog.tool_call), col.tool) + " " + + pad(yn(r.reasoningObserved, r.catalog.reasoning), col.reason) + " " + + pad(yn(r.attachmentObserved, r.catalog.attachment), col.attach) + " " + + pad(yn(r.pdfObserved, r.catalog.pdf), col.pdf) + " " + + pad(yn(r.temperatureObserved, r.catalog.temperature), col.temp) + " " + + input, + ); + } + } + + 
console.log("─".repeat(totalWidth)); + console.log(" ✓ = supported ✗ = not supported * = mismatch vs TOML catalog"); +} + +main().catch((e) => { + console.error(e); + process.exit(1); +}); diff --git a/packages/core/script/test-databricks.ts b/packages/core/script/test-databricks.ts new file mode 100644 index 000000000..42aa4c3bc --- /dev/null +++ b/packages/core/script/test-databricks.ts @@ -0,0 +1,456 @@ +#!/usr/bin/env bun +/** + * models.dev — Databricks provider integration test (single script). + * + * Calls the live AI Gateway for every catalog-aligned route from discovery, using the + * same HTTP surfaces as `providers/databricks` TOMLs + `api.json`: + * mlflow/v1 chat & embeddings | openai/v1/responses | anthropic/v1/messages | gemini/.../generateContent + * + * Prerequisites: Databricks auth (~/.databrickscfg profile or env for @databricks/sdk-experimental). + * + * Repo root: + * bun run databricks:test-inference -- --profile YOUR_PROFILE + * + * Options: --profile NAME | --delay-ms N | --only MODEL_ID | --json | -h + */ + +import path from "node:path"; +import { readFile, readdir } from "node:fs/promises"; +import { WorkspaceClient } from "@databricks/sdk-experimental"; +import { + fetchFilteredGatewayRoutes, + mlflowOpenAiBaseUrl, +} from "./databricks-ai-gateway-shared.js"; + +const MODELS_DIR = path.join( + import.meta.dir, + "../../../providers/databricks/models", +); + +interface TestResult { + model: string; + kind: "chat" | "embedding" | "responses" | "anthropic" | "gemini"; + ok: boolean; + status?: number; + latencyMs?: number; + error?: string; + detail?: string; +} + +/** OpenAI Responses on the gateway host (not under …/mlflow/v1). */ +function openAiResponsesEndpoint(aiGatewayUrl: string): string { + return `${aiGatewayUrl.replace(/\/$/, "")}/openai/v1/responses`; +} + +/** Routes that use OpenAI Responses; gateway `name` values currently include the `-codex` segment. 
*/ +function isOpenAiResponsesRoute(model: string): boolean { + return model.includes("-codex"); +} + +function isClaudeGatewayModel(model: string): boolean { + return model.includes("claude"); +} + +function isGeminiGatewayModel(model: string): boolean { + return model.includes("gemini"); +} + +async function loadEmbeddingModelIds(): Promise> { + const ids = new Set(); + const files = await readdir(MODELS_DIR); + for (const f of files) { + if (!f.endsWith(".toml")) continue; + const text = await readFile(path.join(MODELS_DIR, f), "utf8"); + if (/family\s*=\s*"text-embedding"/.test(text)) { + ids.add(f.replace(/\.toml$/, "")); + } + } + return ids; +} + +async function authHeaders(client: WorkspaceClient): Promise { + const h = new Headers(); + await client.config.authenticate(h); + return h; +} + +async function testChat( + base: string, + headers: Headers, + model: string, +): Promise<{ ok: boolean; status: number; error?: string; detail?: string }> { + const h = new Headers(headers); + h.set("Content-Type", "application/json"); + const t0 = performance.now(); + const res = await fetch(`${base}/chat/completions`, { + method: "POST", + headers: h, + body: JSON.stringify({ + model, + messages: [{ role: "user", content: "Reply with exactly: OK" }], + max_tokens: 4096, + }), + }); + const latencyMs = Math.round(performance.now() - t0); + const text = await res.text(); + if (!res.ok) { + return { + ok: false, + status: res.status, + error: text.slice(0, 500), + detail: `${latencyMs}ms`, + }; + } + let snippet = ""; + try { + const j = JSON.parse(text) as { + choices?: Array<{ message?: { content?: unknown } }>; + }; + const c = j.choices?.[0]?.message?.content; + snippet = + typeof c === "string" + ? c.slice(0, 80) + : c != null + ? 
JSON.stringify(c).slice(0, 80) + : ""; + } catch { + snippet = text.slice(0, 80); + } + return { ok: true, status: res.status, detail: `${latencyMs}ms ${snippet}` }; +} + +async function testResponses( + responsesUrl: string, + headers: Headers, + model: string, +): Promise<{ ok: boolean; status: number; error?: string; detail?: string }> { + const h = new Headers(headers); + h.set("Content-Type", "application/json"); + const t0 = performance.now(); + const res = await fetch(responsesUrl, { + method: "POST", + headers: h, + body: JSON.stringify({ + model, + input: "Reply with exactly: OK", + }), + }); + const latencyMs = Math.round(performance.now() - t0); + const text = await res.text(); + if (!res.ok) { + return { + ok: false, + status: res.status, + error: text.slice(0, 500), + detail: `${latencyMs}ms`, + }; + } + let snippet = ""; + try { + const j = JSON.parse(text) as { + output_text?: string; + output?: Array<{ + type?: string; + content?: Array<{ type?: string; text?: string }>; + }>; + }; + if (typeof j.output_text === "string") snippet = j.output_text.slice(0, 80); + else if (Array.isArray(j.output)) { + for (const block of j.output) { + if (block.type !== "message" || !block.content) continue; + for (const part of block.content) { + if (part.type === "output_text" && part.text) { + snippet = part.text.slice(0, 80); + break; + } + } + if (snippet) break; + } + } + if (!snippet) snippet = text.slice(0, 80); + } catch { + snippet = text.slice(0, 80); + } + return { ok: true, status: res.status, detail: `${latencyMs}ms ${snippet}` }; +} + +async function testEmbedding( + base: string, + headers: Headers, + model: string, +): Promise<{ ok: boolean; status: number; error?: string; detail?: string }> { + const h = new Headers(headers); + h.set("Content-Type", "application/json"); + const t0 = performance.now(); + const res = await fetch(`${base}/embeddings`, { + method: "POST", + headers: h, + body: JSON.stringify({ + model, + input: "test", + }), + }); + const 
latencyMs = Math.round(performance.now() - t0); + const text = await res.text(); + if (!res.ok) { + return { + ok: false, + status: res.status, + error: text.slice(0, 500), + detail: `${latencyMs}ms`, + }; + } + let dims = ""; + try { + const j = JSON.parse(text) as { + data?: Array<{ embedding?: number[] }>; + }; + const emb = j.data?.[0]?.embedding; + dims = emb ? `dim=${emb.length}` : "?"; + } catch { + dims = "?"; + } + return { ok: true, status: res.status, detail: `${latencyMs}ms ${dims}` }; +} + +function anthropicMessagesUrl(aiGatewayUrl: string): string { + return `${aiGatewayUrl.replace(/\/$/, "")}/anthropic/v1/messages`; +} + +async function testAnthropicMessages( + url: string, + headers: Headers, + model: string, +): Promise<{ ok: boolean; status: number; error?: string; detail?: string }> { + const h = new Headers(headers); + h.set("Content-Type", "application/json"); + h.set("anthropic-version", "2023-06-01"); + const t0 = performance.now(); + const res = await fetch(url, { + method: "POST", + headers: h, + body: JSON.stringify({ + model, + max_tokens: 256, + messages: [{ role: "user", content: "Reply with exactly: OK" }], + }), + }); + const latencyMs = Math.round(performance.now() - t0); + const text = await res.text(); + if (!res.ok) { + return { + ok: false, + status: res.status, + error: text.slice(0, 500), + detail: `${latencyMs}ms`, + }; + } + let snippet = ""; + try { + const j = JSON.parse(text) as { + content?: Array<{ type?: string; text?: string }>; + }; + const block = j.content?.find((c) => c.type === "text" && c.text); + snippet = block?.text?.slice(0, 80) ?? 
text.slice(0, 80); + } catch { + snippet = text.slice(0, 80); + } + return { ok: true, status: res.status, detail: `${latencyMs}ms ${snippet}` }; +} + +function geminiGenerateUrl(aiGatewayUrl: string, model: string): string { + const base = aiGatewayUrl.replace(/\/$/, ""); + return `${base}/gemini/v1beta/models/${model}:generateContent`; +} + +async function testGeminiGenerate( + url: string, + headers: Headers, +): Promise<{ ok: boolean; status: number; error?: string; detail?: string }> { + const h = new Headers(headers); + h.set("Content-Type", "application/json"); + const t0 = performance.now(); + const res = await fetch(url, { + method: "POST", + headers: h, + body: JSON.stringify({ + contents: [ + { + role: "user", + parts: [{ text: "Reply with exactly: OK" }], + }, + ], + generationConfig: { maxOutputTokens: 256 }, + }), + }); + const latencyMs = Math.round(performance.now() - t0); + const text = await res.text(); + if (!res.ok) { + return { + ok: false, + status: res.status, + error: text.slice(0, 500), + detail: `${latencyMs}ms`, + }; + } + let snippet = ""; + try { + const j = JSON.parse(text) as { + candidates?: Array<{ + content?: { parts?: Array<{ text?: string }> }; + }>; + }; + const parts = j.candidates?.[0]?.content?.parts; + const t = parts?.find((p) => p.text)?.text; + snippet = t?.slice(0, 80) ?? 
text.slice(0, 80); + } catch { + snippet = text.slice(0, 80); + } + return { ok: true, status: res.status, detail: `${latencyMs}ms ${snippet}` }; +} + +function parseArgs() { + const argv = process.argv.slice(2); + let profile = process.env.DATABRICKS_CONFIG_PROFILE; + let delayMs = 400; + let only: string | undefined; + let jsonOut = false; + for (let i = 0; i < argv.length; i++) { + const a = argv[i]; + if (a === "--profile" && argv[i + 1]) { + profile = argv[++i]; + continue; + } + if (a === "--delay-ms" && argv[i + 1]) { + delayMs = Number(argv[++i]); + continue; + } + if (a === "--only" && argv[i + 1]) { + only = argv[++i]; + continue; + } + if (a === "--json") { + jsonOut = true; + continue; + } + if (a === "--help" || a === "-h") { + console.log(`Usage: bun run databricks:test-inference -- [options] + + --profile NAME Databricks profile (~/.databrickscfg) + --delay-ms N Delay between requests (default 400) + --only MODEL_ID Single gateway model id + --json JSON summary on stdout at end +`); + process.exit(0); + } + } + return { profile, delayMs, only, jsonOut }; +} + +function sleep(ms: number) { + return new Promise((r) => setTimeout(r, ms)); +} + +async function main() { + const { profile, delayMs, only, jsonOut } = parseArgs(); + + const client = new WorkspaceClient(profile ? 
{ profile } : {}); + const routes = await fetchFilteredGatewayRoutes(client); + + const gatewayUrl = routes[0]?.ai_gateway_url; + if (!gatewayUrl) { + throw new Error("No AI Gateway URL on endpoints; cannot build mlflow base."); + } + const mlflowBase = mlflowOpenAiBaseUrl(gatewayUrl); + const responsesUrl = openAiResponsesEndpoint(gatewayUrl); + const anthropicUrl = anthropicMessagesUrl(gatewayUrl); + const headers = await authHeaders(client); + + const embeddingIds = await loadEmbeddingModelIds(); + let toRun = routes; + if (only) { + toRun = routes.filter((r) => r.gateway_name === only); + if (toRun.length === 0) { + throw new Error(`Model not in filtered routes: ${only}`); + } + } + + const results: TestResult[] = []; + console.log(`Gateway mlflow base: ${mlflowBase}`); + console.log(`Gateway responses: ${responsesUrl}`); + console.log(`Gateway anthropic: ${anthropicUrl}`); + console.log(`Models to test: ${toRun.length}\n`); + + for (const r of toRun) { + const model = r.gateway_name; + const kind: TestResult["kind"] = embeddingIds.has(model) + ? "embedding" + : isOpenAiResponsesRoute(model) + ? "responses" + : isClaudeGatewayModel(model) + ? "anthropic" + : isGeminiGatewayModel(model) + ? "gemini" + : "chat"; + process.stdout.write(`${kind.padEnd(10)} ${model} ... `); + const out = + kind === "embedding" + ? await testEmbedding(mlflowBase, headers, model) + : kind === "responses" + ? await testResponses(responsesUrl, headers, model) + : kind === "anthropic" + ? await testAnthropicMessages(anthropicUrl, headers, model) + : kind === "gemini" + ? await testGeminiGenerate( + geminiGenerateUrl(gatewayUrl, model), + headers, + ) + : await testChat(mlflowBase, headers, model); + const lat = out.detail?.match(/^(\d+)ms/); + const tr: TestResult = { + model, + kind, + ok: out.ok, + status: out.status, + latencyMs: lat ? Number(lat[1]) : undefined, + error: out.error, + detail: out.detail, + }; + results.push(tr); + console.log(out.ok ? 
`OK ${out.detail}` : `FAIL ${out.status} ${out.error?.slice(0, 120)}`); + if (delayMs > 0) await sleep(delayMs); + } + + const failed = results.filter((x) => !x.ok); + console.log(`\n---\nPassed: ${results.length - failed.length}/${results.length}`); + if (failed.length) { + console.log("Failed:"); + for (const f of failed) { + console.log(` ${f.model} (${f.kind}) ${f.status}: ${f.error?.slice(0, 200)}`); + } + } + + if (jsonOut) { + console.log( + JSON.stringify( + { + base: mlflowBase, + responsesUrl, + anthropicUrl, + results, + passed: results.length - failed.length, + total: results.length, + }, + null, + 2, + ), + ); + } + + if (failed.length > 0) process.exit(1); +} + +main().catch((e) => { + console.error(e); + process.exit(1); +}); diff --git a/packages/core/src/schema.ts b/packages/core/src/schema.ts index d7f042361..7b9c3cff6 100644 --- a/packages/core/src/schema.ts +++ b/packages/core/src/schema.ts @@ -147,13 +147,15 @@ export const Provider = z .refine( (data) => { const isOpenAI = data.npm === "@ai-sdk/openai"; - const isOpenAIcompatible = data.npm === "@ai-sdk/openai-compatible"; + const isOpenAIcompatible = + data.npm === "@ai-sdk/openai-compatible" || + data.npm === "@databricks/ai-sdk-provider"; const isOpenrouter = data.npm === "@openrouter/ai-sdk-provider"; const isAnthropic = data.npm === "@ai-sdk/anthropic"; const hasApi = data.api !== undefined; return ( - // openai-compatible: must have api + // openai-compatible (incl. 
Databricks AI SDK provider): must have api (isOpenAIcompatible && hasApi) || // openrouter: must have api (isOpenrouter && hasApi) || @@ -171,7 +173,7 @@ export const Provider = z }, { message: - "'api' is required for openai-compatible and openrouter, optional for anthropic and openai, forbidden otherwise", + "'api' is required for @ai-sdk/openai-compatible, @databricks/ai-sdk-provider, and openrouter; optional for anthropic and openai; forbidden otherwise", path: ["api"], }, ); diff --git a/providers/databricks/README.md b/providers/databricks/README.md new file mode 100644 index 000000000..4af1df098 --- /dev/null +++ b/providers/databricks/README.md @@ -0,0 +1,226 @@ +# Databricks ([models.dev](http://models.dev)) + +Enterprise catalog metadata for **Databricks Foundation Model APIs** exposed through **Databricks AI Gateway**. Published data appears in `**api.json`** after site generation: integrators read `**npm**`, `**api**`, optional per-model `**provider**`, and capability fields to configure production clients—without embedding workspace secrets in source control. + +--- + +## Scope + + +| Item | Definition | +| -------------------------- | -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | +| **What is catalogued** | AI Gateway routes whose names begin `**databricks-`** and that reference Unity Catalog `**system.ai.***` pay-per-token foundation model destinations. | +| **Default contract** | OpenAI-compatible **chat** and **embeddings** on `**…/mlflow/v1`** via `**@databricks/ai-sdk-provider**` ([npm](https://www.npmjs.com/package/@databricks/ai-sdk-provider)) (`[provider.toml](./provider.toml)`). | +| **Native vendor surfaces** | Selected models override `**npm`** / `**api**` for **Anthropic Messages**, **Gemini `generateContent`**, and **OpenAI Responses**—all on the **same `ai_gateway_url` host** as MLflow. 
| + + +--- + +## Stakeholders + + +| Audience | Value | +| ------------------------- | ------------------------------------------------------------------------------------------------------------------------ | +| **Application teams** | Stable model identifiers, modality and limit metadata, and SDK package hints from `**api.json`**. | +| **Platform engineering** | Documented discovery flow, HTTP layout on AI Gateway, and alignment with Databricks access controls. | +| **Security & compliance** | Repository holds **templates and metadata only**; credentials and tokens remain in your vault and runtime configuration. | + + +--- + +## Databricks product references + +- [AI Gateway](https://docs.databricks.com/en/generative-ai/ai-gateway/index.html) +- [Foundation Model APIs](https://docs.databricks.com/en/machine-learning/foundation-models/index.html) +- [Provider native APIs](https://docs.databricks.com/aws/en/machine-learning/model-serving/provider-native-apis) + +--- + +## Logical architecture + +```text + Client (SDK) Databricks AI Gateway (host = ai_gateway_url) + │ ┌──────────────────────────────────────────────┐ + │ Authorization: Bearer │ /mlflow/v1/* OpenAI-compatible │ + └────────────────────────►│ /anthropic/v1/messages Claude │ + │ /gemini/v1beta/... Gemini │ + │ /openai/v1/responses OpenAI Responses │ + └──────────────────────────────────────────────┘ + │ ▲ + │ GET /api/ai-gateway/v2/endpoints │ + └────────────────────────────────────┘ + Workspace REST API +``` + + +| Concern | Owner | +| -------------- | ------------------------------------------------------------------------------------------------------------------------ | +| `**api.json**` | Declares `**npm**`, `**api**`, optional `**models[id].provider**`, capabilities, limits. | +| **Runtime** | Substitutes `**`**, performs discovery, attaches **Bearer** tokens, applies optional name aliases. 
| +| **Databricks** | Publishes `**name`** on each gateway route; clients send that string as the vendor `**model**` unless remapped. | + + +--- + +## Discovery and identifiers + +**Discovery** + +```http +GET https:///api/ai-gateway/v2/endpoints +``` + +Each item includes `**ai_gateway_url**`. Combine that host with each model’s `**api**` path from `**api.json**` (default `**/mlflow/v1**`, or the model-level `**provider.api**` suffix). + +**Identifiers** + +- **Catalog model ID** — Relative path under `**models/`** without `**.toml**` (example: `databricks-claude-sonnet-4-6`). +- **Wire name** — Ordinarily identical to the catalog ID and to the gateway `**name`** returned by discovery. + +**Eligibility for inclusion in this folder** + +1. Gateway `**name`** prefix `**databricks-**`. +2. At least one `**config.destinations[]**` with `**type**` = `**PAY_PER_TOKEN_FOUNDATION_MODEL**` and `**name**` starting `**system.ai.**`. + +**Reference implementation:** [databricks-ai-bridge](https://github.com/databricks/databricks-ai-bridge) (gateway host resolution and OpenAI client patterns). + +--- + +## HTTP layout on AI Gateway + +Let `**G`** = `https://.ai-gateway.cloud.databricks.com` (from `**ai_gateway_url**`). All calls use one **Bearer** access token. 
+ + +| Traffic class | `npm` (from `api.json`) | `api` base (template) | Primary HTTP operations | +| -------------------------------- | --------------------------- | --------------------- | ------------------------------------------------------------------------------------------------- | +| General chat & OSS / open models | `@databricks/ai-sdk-provider` | `**G/mlflow/v1`** | `POST …/chat/completions` | +| Embeddings | *(same default)* | `**G/mlflow/v1**` | `POST …/embeddings` | +| Claude | `@ai-sdk/anthropic` | `**G/anthropic**` | `POST …/anthropic/v1/messages` (supply `**anthropic-version**` per Anthropic client requirements) | +| Gemini | `@ai-sdk/google` | `**G/gemini**` | `POST …/gemini/v1beta/models/:generateContent` | +| OpenAI Responses | `@databricks/ai-sdk-provider` | `**G/openai/v1**` | `POST …/openai/v1/responses` (TOML includes `**shape = "responses"**`; use provider `**responses**` in code) | + + +Per-model overrides live under `**[provider]**` in the corresponding `**models/*.toml**` files; all other rows inherit `[provider.toml](./provider.toml)`. + +--- + +## Authentication and environment + +```text +Authorization: Bearer +``` + + +| Credential style | Typical inputs | +| ----------------------- | -------------------------------------------------- | +| Personal access token | `DATABRICKS_TOKEN`, workspace URL | +| User or delegated OAuth | Access token from your approved OAuth flow | +| Service principal | `DATABRICKS_CLIENT_ID`, `DATABRICKS_CLIENT_SECRET` | + + +Environment keys commonly used with Databricks tooling are listed in `[provider.toml](./provider.toml)`. Authoritative OAuth and unified-authentication guidance is maintained by Databricks: start from **[Authorize access to Databricks resources](https://docs.databricks.com/aws/en/dev-tools/auth/)** (select your cloud if not on AWS). + +--- + +## Consuming `api.json` + +1. 
For each `**databricks.models[modelId]`**, read `**npm**`, `**api**`, and optional `**provider**` (when present, `**provider**` overrides the provider root for that model). +2. Resolve `**G**` via discovery or by substituting `****`. +3. Instantiate the SDK that matches `**npm**` and point it at the resolved base URL per that SDK’s conventions. +4. Pass the gateway `**name**` (usually `**modelId**`) as the vendor `**model**` parameter unless your organization uses an alias table. + +**Example — default MLflow chat (TypeScript)** + +```typescript +import { createDatabricksProvider } from "@databricks/ai-sdk-provider"; +import { generateText } from "ai"; + +const baseURL = + `https://${workspaceNumericId}.ai-gateway.cloud.databricks.com/mlflow/v1`; + +const databricks = createDatabricksProvider({ + baseURL, + headers: { Authorization: `Bearer ${token}` }, +}); + +const wireId = aliases[catalogModelId] ?? catalogModelId; + +await generateText({ + model: databricks.chatCompletions(wireId), + prompt: "Hello", +}); +``` + +**Example — default MLflow chat (Python)** + +```python +from openai import OpenAI + +base_url = f"https://{workspace_numeric_id}.ai-gateway.cloud.databricks.com/mlflow/v1" +wire_id = aliases.get(catalog_model_id, catalog_model_id) + +client = OpenAI(api_key=token, base_url=base_url) +resp = client.chat.completions.create( + model=wire_id, + messages=[{"role": "user", "content": "Hello"}], +) +``` + +Claude and Gemini rows use `**@ai-sdk/anthropic**` and `**@ai-sdk/google**` respectively. MLflow chat/embeddings and OpenAI Responses use `**@databricks/ai-sdk-provider**` with the `**api**` templates in `**api.json**` (Responses: set `**baseURL**` to the resolved `**/openai/v1**` host and call `**responses(modelId)**` on the provider instance). + +--- + +## Model catalog + +- **Count:** 36 models (catalog review date `**2026-04-11`**). 
+- **Availability** depends on region, workspace entitlements, and `**system.ai.*`** registration—validate with discovery in each target workspace. +- **Embeddings** are tagged `**family = "text-embedding"`**; product UIs that list only chat models should exclude that family. + +--- + +## Catalog field policy + +Maintain `**release_date**`, `**last_updated**`, `**[limit]**`, and capability flags in line with [Foundation Model APIs](https://docs.databricks.com/en/machine-learning/foundation-models/index.html) and your deployment. **Unit pricing is not represented** in these TOMLs: FMA economics are account- and contract-specific on Databricks; downstream systems should source commercial terms from your billing and procurement tools. + +--- + +## Development and verification + +**Repository checks (no workspace access required)** + +```bash +bun install +bun validate +cd packages/web && bun run build +``` + +**Workspace-backed scripts** (Databricks authentication required, e.g. `**~/.databrickscfg`** profile or equivalent env for `**@databricks/sdk-experimental**`) + + +| npm script | Purpose | +| -------------------------------------------------------- | --------------------------------------------------------------------------------------------- | +| `bun run databricks:list-gateway -- --profile PROFILE` | Lists or `**--json**` exports routes matching this catalog’s filter. | +| `bun run databricks:test-inference -- --profile PROFILE` | Exercises each catalog route against AI Gateway (`--only`, `--delay-ms`, `--json` supported). | + + +Scripts: `[list-databricks-ai-gateway.ts](../../packages/core/script/list-databricks-ai-gateway.ts)`, `[test-databricks.ts](../../packages/core/script/test-databricks.ts)`. + +--- + +## Upstream contribution + +Changes intended for **[anomalyco/models.dev](https://github.com/anomalyco/models.dev)** follow the [repository contributing guide](../../README.md#contributing). 
Preserve the `**api`** / `**[provider]**` layout described here, keep secrets out of the tree, and run `**bun validate**` before submitting. + +--- + +## Additional references + + +| Resource | URL | +| ------------------------------------------------------------------------------------------ | ---------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | +| Foundation Model API reference | [https://docs.databricks.com/en/machine-learning/foundation-models/api-reference.html](https://docs.databricks.com/en/machine-learning/foundation-models/api-reference.html) | +| Databricks authentication and OAuth (PAT, user OAuth, service principal M2M, unified auth) | [https://docs.databricks.com/aws/en/dev-tools/auth/](https://docs.databricks.com/aws/en/dev-tools/auth/) | +| models.dev API | [https://models.dev/api.json](https://models.dev/api.json) | +| Provider logo (after site build) | [https://models.dev/logos/databricks.svg](https://models.dev/logos/databricks.svg) | + + diff --git a/providers/databricks/logo.svg b/providers/databricks/logo.svg new file mode 100644 index 000000000..ab7e64bb8 --- /dev/null +++ b/providers/databricks/logo.svg @@ -0,0 +1,3 @@ + + + diff --git a/providers/databricks/models/databricks-bge-large-en.toml b/providers/databricks/models/databricks-bge-large-en.toml new file mode 100644 index 000000000..b968463aa --- /dev/null +++ b/providers/databricks/models/databricks-bge-large-en.toml @@ -0,0 +1,17 @@ +name = "Databricks BGE Large English" +family = "text-embedding" +release_date = "2024-09-01" +last_updated = "2026-04-11" +attachment = false +reasoning = false +tool_call = false +temperature = false +open_weights = true + +[limit] +context = 8192 +output = 1024 + +[modalities] +input = ["text"] +output = ["text"] diff --git a/providers/databricks/models/databricks-claude-3-7-sonnet.toml 
b/providers/databricks/models/databricks-claude-3-7-sonnet.toml new file mode 100644 index 000000000..a023bf42c --- /dev/null +++ b/providers/databricks/models/databricks-claude-3-7-sonnet.toml @@ -0,0 +1,21 @@ +name = "Databricks Claude 3.7 Sonnet" +family = "claude-sonnet" +release_date = "2025-06-01" +last_updated = "2026-04-11" +attachment = true +reasoning = true +tool_call = true +temperature = true +open_weights = false + +[limit] +context = 200000 +output = 64000 + +[modalities] +input = ["text", "image", "pdf"] +output = ["text"] + +[provider] +npm = "@ai-sdk/anthropic" +api = "https://.ai-gateway.cloud.databricks.com/anthropic" diff --git a/providers/databricks/models/databricks-claude-haiku-4-5.toml b/providers/databricks/models/databricks-claude-haiku-4-5.toml new file mode 100644 index 000000000..e76eb7853 --- /dev/null +++ b/providers/databricks/models/databricks-claude-haiku-4-5.toml @@ -0,0 +1,21 @@ +name = "Databricks Claude Haiku 4.5" +family = "claude-haiku" +release_date = "2025-06-01" +last_updated = "2026-04-11" +attachment = true +reasoning = true +tool_call = true +temperature = true +open_weights = false + +[limit] +context = 200000 +output = 64000 + +[modalities] +input = ["text", "image", "pdf"] +output = ["text"] + +[provider] +npm = "@ai-sdk/anthropic" +api = "https://.ai-gateway.cloud.databricks.com/anthropic" diff --git a/providers/databricks/models/databricks-claude-opus-4-1.toml b/providers/databricks/models/databricks-claude-opus-4-1.toml new file mode 100644 index 000000000..95d2bafea --- /dev/null +++ b/providers/databricks/models/databricks-claude-opus-4-1.toml @@ -0,0 +1,21 @@ +name = "Databricks Claude Opus 4.1" +family = "claude-opus" +release_date = "2025-06-01" +last_updated = "2026-04-11" +attachment = true +reasoning = true +tool_call = true +temperature = true +open_weights = false + +[limit] +context = 200000 +output = 64000 + +[modalities] +input = ["text", "image", "pdf"] +output = ["text"] + +[provider] +npm = 
 "@ai-sdk/anthropic" +api = "https://<workspace-id>.ai-gateway.cloud.databricks.com/anthropic" diff --git a/providers/databricks/models/databricks-claude-opus-4-5.toml b/providers/databricks/models/databricks-claude-opus-4-5.toml new file mode 100644 index 000000000..e2bd1990a --- /dev/null +++ b/providers/databricks/models/databricks-claude-opus-4-5.toml @@ -0,0 +1,21 @@ +name = "Databricks Claude Opus 4.5" +family = "claude-opus" +release_date = "2025-06-01" +last_updated = "2026-04-11" +attachment = true +reasoning = true +tool_call = true +temperature = true +open_weights = false + +[limit] +context = 200000 +output = 64000 + +[modalities] +input = ["text", "image", "pdf"] +output = ["text"] + +[provider] +npm = "@ai-sdk/anthropic" +api = "https://<workspace-id>.ai-gateway.cloud.databricks.com/anthropic" diff --git a/providers/databricks/models/databricks-claude-opus-4-6.toml b/providers/databricks/models/databricks-claude-opus-4-6.toml new file mode 100644 index 000000000..b813e7fbd --- /dev/null +++ b/providers/databricks/models/databricks-claude-opus-4-6.toml @@ -0,0 +1,21 @@ +name = "Databricks Claude Opus 4.6" +family = "claude-opus" +release_date = "2025-06-01" +last_updated = "2026-04-11" +attachment = true +reasoning = true +tool_call = true +temperature = true +open_weights = false + +[limit] +context = 200000 +output = 64000 + +[modalities] +input = ["text", "image", "pdf"] +output = ["text"] + +[provider] +npm = "@ai-sdk/anthropic" +api = "https://<workspace-id>.ai-gateway.cloud.databricks.com/anthropic" diff --git a/providers/databricks/models/databricks-claude-sonnet-4-5.toml b/providers/databricks/models/databricks-claude-sonnet-4-5.toml new file mode 100644 index 000000000..ed095d91e --- /dev/null +++ b/providers/databricks/models/databricks-claude-sonnet-4-5.toml @@ -0,0 +1,21 @@ +name = "Databricks Claude Sonnet 4.5" +family = "claude-sonnet" +release_date = "2025-06-01" +last_updated = "2026-04-11" +attachment = true +reasoning = true +tool_call = true +temperature = true +open_weights 
 = false + +[limit] +context = 200000 +output = 64000 + +[modalities] +input = ["text", "image", "pdf"] +output = ["text"] + +[provider] +npm = "@ai-sdk/anthropic" +api = "https://<workspace-id>.ai-gateway.cloud.databricks.com/anthropic" diff --git a/providers/databricks/models/databricks-claude-sonnet-4-6.toml b/providers/databricks/models/databricks-claude-sonnet-4-6.toml new file mode 100644 index 000000000..5c31f1a7f --- /dev/null +++ b/providers/databricks/models/databricks-claude-sonnet-4-6.toml @@ -0,0 +1,21 @@ +name = "Databricks Claude Sonnet 4.6" +family = "claude-sonnet" +release_date = "2025-06-01" +last_updated = "2026-04-11" +attachment = true +reasoning = true +tool_call = true +temperature = true +open_weights = false + +[limit] +context = 200000 +output = 64000 + +[modalities] +input = ["text", "image", "pdf"] +output = ["text"] + +[provider] +npm = "@ai-sdk/anthropic" +api = "https://<workspace-id>.ai-gateway.cloud.databricks.com/anthropic" diff --git a/providers/databricks/models/databricks-claude-sonnet-4.toml b/providers/databricks/models/databricks-claude-sonnet-4.toml new file mode 100644 index 000000000..2c8c50e15 --- /dev/null +++ b/providers/databricks/models/databricks-claude-sonnet-4.toml @@ -0,0 +1,21 @@ +name = "Databricks Claude Sonnet 4" +family = "claude-sonnet" +release_date = "2025-06-01" +last_updated = "2026-04-11" +attachment = true +reasoning = true +tool_call = true +temperature = true +open_weights = false + +[limit] +context = 200000 +output = 64000 + +[modalities] +input = ["text", "image", "pdf"] +output = ["text"] + +[provider] +npm = "@ai-sdk/anthropic" +api = "https://<workspace-id>.ai-gateway.cloud.databricks.com/anthropic" diff --git a/providers/databricks/models/databricks-gemini-2-5-flash.toml b/providers/databricks/models/databricks-gemini-2-5-flash.toml new file mode 100644 index 000000000..f25da51d5 --- /dev/null +++ b/providers/databricks/models/databricks-gemini-2-5-flash.toml @@ -0,0 +1,21 @@ +name = "Databricks Gemini 2.5 Flash" +family = 
 "gemini-flash" +release_date = "2025-06-01" +last_updated = "2026-04-11" +attachment = true +reasoning = true +tool_call = true +temperature = true +open_weights = false + +[limit] +context = 1048576 +output = 65536 + +[modalities] +input = ["text", "image", "pdf"] +output = ["text"] + +[provider] +npm = "@ai-sdk/google" +api = "https://<workspace-id>.ai-gateway.cloud.databricks.com/gemini" diff --git a/providers/databricks/models/databricks-gemini-2-5-pro.toml b/providers/databricks/models/databricks-gemini-2-5-pro.toml new file mode 100644 index 000000000..916310caa --- /dev/null +++ b/providers/databricks/models/databricks-gemini-2-5-pro.toml @@ -0,0 +1,21 @@ +name = "Databricks Gemini 2.5 Pro" +family = "gemini-pro" +release_date = "2025-06-01" +last_updated = "2026-04-11" +attachment = true +reasoning = true +tool_call = true +temperature = true +open_weights = false + +[limit] +context = 1048576 +output = 65536 + +[modalities] +input = ["text", "image", "pdf"] +output = ["text"] + +[provider] +npm = "@ai-sdk/google" +api = "https://<workspace-id>.ai-gateway.cloud.databricks.com/gemini" diff --git a/providers/databricks/models/databricks-gemini-3-1-flash-lite.toml b/providers/databricks/models/databricks-gemini-3-1-flash-lite.toml new file mode 100644 index 000000000..81261e610 --- /dev/null +++ b/providers/databricks/models/databricks-gemini-3-1-flash-lite.toml @@ -0,0 +1,21 @@ +name = "Databricks Gemini 3.1 Flash Lite" +family = "gemini-flash-lite" +release_date = "2025-06-01" +last_updated = "2026-04-11" +attachment = true +reasoning = true +tool_call = true +temperature = true +open_weights = false + +[limit] +context = 1048576 +output = 65536 + +[modalities] +input = ["text", "image", "pdf"] +output = ["text"] + +[provider] +npm = "@ai-sdk/google" +api = "https://<workspace-id>.ai-gateway.cloud.databricks.com/gemini" diff --git a/providers/databricks/models/databricks-gemini-3-1-pro.toml b/providers/databricks/models/databricks-gemini-3-1-pro.toml new file mode 100644 index 000000000..b672abdde 
--- /dev/null +++ b/providers/databricks/models/databricks-gemini-3-1-pro.toml @@ -0,0 +1,21 @@ +name = "Databricks Gemini 3.1 Pro" +family = "gemini-pro" +release_date = "2025-06-01" +last_updated = "2026-04-11" +attachment = true +reasoning = true +tool_call = true +temperature = true +open_weights = false + +[limit] +context = 1048576 +output = 65536 + +[modalities] +input = ["text", "image", "pdf"] +output = ["text"] + +[provider] +npm = "@ai-sdk/google" +api = "https://.ai-gateway.cloud.databricks.com/gemini" diff --git a/providers/databricks/models/databricks-gemini-3-flash.toml b/providers/databricks/models/databricks-gemini-3-flash.toml new file mode 100644 index 000000000..ccde00072 --- /dev/null +++ b/providers/databricks/models/databricks-gemini-3-flash.toml @@ -0,0 +1,21 @@ +name = "Databricks Gemini 3 Flash" +family = "gemini-flash" +release_date = "2025-06-01" +last_updated = "2026-04-11" +attachment = true +reasoning = true +tool_call = true +temperature = true +open_weights = false + +[limit] +context = 1048576 +output = 65536 + +[modalities] +input = ["text", "image", "pdf"] +output = ["text"] + +[provider] +npm = "@ai-sdk/google" +api = "https://.ai-gateway.cloud.databricks.com/gemini" diff --git a/providers/databricks/models/databricks-gemini-3-pro.toml b/providers/databricks/models/databricks-gemini-3-pro.toml new file mode 100644 index 000000000..56b2b396e --- /dev/null +++ b/providers/databricks/models/databricks-gemini-3-pro.toml @@ -0,0 +1,21 @@ +name = "Databricks Gemini 3 Pro" +family = "gemini-pro" +release_date = "2025-06-01" +last_updated = "2026-04-11" +attachment = true +reasoning = true +tool_call = true +temperature = true +open_weights = false + +[limit] +context = 1048576 +output = 65536 + +[modalities] +input = ["text", "image", "pdf"] +output = ["text"] + +[provider] +npm = "@ai-sdk/google" +api = "https://.ai-gateway.cloud.databricks.com/gemini" diff --git a/providers/databricks/models/databricks-gemma-3-12b.toml 
b/providers/databricks/models/databricks-gemma-3-12b.toml new file mode 100644 index 000000000..7a018848e --- /dev/null +++ b/providers/databricks/models/databricks-gemma-3-12b.toml @@ -0,0 +1,17 @@ +name = "Databricks Gemma 3 12B" +family = "gemma" +release_date = "2025-03-01" +last_updated = "2026-04-11" +attachment = true +reasoning = true +tool_call = true +temperature = true +open_weights = true + +[limit] +context = 131072 +output = 8192 + +[modalities] +input = ["text", "image"] +output = ["text"] diff --git a/providers/databricks/models/databricks-gpt-5-1-codex-max.toml b/providers/databricks/models/databricks-gpt-5-1-codex-max.toml new file mode 100644 index 000000000..274161086 --- /dev/null +++ b/providers/databricks/models/databricks-gpt-5-1-codex-max.toml @@ -0,0 +1,22 @@ +name = "Databricks GPT-5.1 Codex Max" +family = "gpt-codex" +release_date = "2025-06-01" +last_updated = "2026-04-11" +attachment = true +reasoning = true +tool_call = true +temperature = false +open_weights = false + +[limit] +context = 400000 +output = 128000 + +[modalities] +input = ["text", "image", "pdf"] +output = ["text"] + +[provider] +npm = "@databricks/ai-sdk-provider" +api = "https://.ai-gateway.cloud.databricks.com/openai/v1" +shape = "responses" diff --git a/providers/databricks/models/databricks-gpt-5-1-codex-mini.toml b/providers/databricks/models/databricks-gpt-5-1-codex-mini.toml new file mode 100644 index 000000000..eea0966d7 --- /dev/null +++ b/providers/databricks/models/databricks-gpt-5-1-codex-mini.toml @@ -0,0 +1,22 @@ +name = "Databricks GPT-5.1 Codex Mini" +family = "gpt-codex-mini" +release_date = "2025-06-01" +last_updated = "2026-04-11" +attachment = true +reasoning = true +tool_call = true +temperature = false +open_weights = false + +[limit] +context = 400000 +output = 128000 + +[modalities] +input = ["text", "image", "pdf"] +output = ["text"] + +[provider] +npm = "@databricks/ai-sdk-provider" +api = "https://.ai-gateway.cloud.databricks.com/openai/v1" 
+shape = "responses" diff --git a/providers/databricks/models/databricks-gpt-5-1.toml b/providers/databricks/models/databricks-gpt-5-1.toml new file mode 100644 index 000000000..cca6c777f --- /dev/null +++ b/providers/databricks/models/databricks-gpt-5-1.toml @@ -0,0 +1,17 @@ +name = "Databricks GPT-5.1" +family = "gpt" +release_date = "2025-06-01" +last_updated = "2026-04-11" +attachment = true +reasoning = true +tool_call = true +temperature = false +open_weights = false + +[limit] +context = 400000 +output = 128000 + +[modalities] +input = ["text", "image"] +output = ["text"] diff --git a/providers/databricks/models/databricks-gpt-5-2-codex.toml b/providers/databricks/models/databricks-gpt-5-2-codex.toml new file mode 100644 index 000000000..0510b0502 --- /dev/null +++ b/providers/databricks/models/databricks-gpt-5-2-codex.toml @@ -0,0 +1,22 @@ +name = "Databricks GPT-5.2 Codex" +family = "gpt-codex" +release_date = "2025-06-01" +last_updated = "2026-04-11" +attachment = true +reasoning = true +tool_call = true +temperature = false +open_weights = false + +[limit] +context = 400000 +output = 128000 + +[modalities] +input = ["text", "image", "pdf"] +output = ["text"] + +[provider] +npm = "@databricks/ai-sdk-provider" +api = "https://.ai-gateway.cloud.databricks.com/openai/v1" +shape = "responses" diff --git a/providers/databricks/models/databricks-gpt-5-2.toml b/providers/databricks/models/databricks-gpt-5-2.toml new file mode 100644 index 000000000..6489282a2 --- /dev/null +++ b/providers/databricks/models/databricks-gpt-5-2.toml @@ -0,0 +1,17 @@ +name = "Databricks GPT-5.2" +family = "gpt" +release_date = "2025-06-01" +last_updated = "2026-04-11" +attachment = true +reasoning = true +tool_call = true +temperature = false +open_weights = false + +[limit] +context = 400000 +output = 128000 + +[modalities] +input = ["text", "image"] +output = ["text"] diff --git a/providers/databricks/models/databricks-gpt-5-3-codex.toml 
b/providers/databricks/models/databricks-gpt-5-3-codex.toml new file mode 100644 index 000000000..1d79254e5 --- /dev/null +++ b/providers/databricks/models/databricks-gpt-5-3-codex.toml @@ -0,0 +1,22 @@ +name = "Databricks GPT-5.3 Codex" +family = "gpt-codex" +release_date = "2025-06-01" +last_updated = "2026-04-11" +attachment = true +reasoning = true +tool_call = true +temperature = false +open_weights = false + +[limit] +context = 400000 +output = 128000 + +[modalities] +input = ["text", "image", "pdf"] +output = ["text"] + +[provider] +npm = "@databricks/ai-sdk-provider" +api = "https://.ai-gateway.cloud.databricks.com/openai/v1" +shape = "responses" diff --git a/providers/databricks/models/databricks-gpt-5-4-mini.toml b/providers/databricks/models/databricks-gpt-5-4-mini.toml new file mode 100644 index 000000000..63f283f21 --- /dev/null +++ b/providers/databricks/models/databricks-gpt-5-4-mini.toml @@ -0,0 +1,17 @@ +name = "Databricks GPT-5.4 Mini" +family = "gpt-mini" +release_date = "2025-06-01" +last_updated = "2026-04-11" +attachment = true +reasoning = true +tool_call = true +temperature = false +open_weights = false + +[limit] +context = 400000 +output = 128000 + +[modalities] +input = ["text", "image"] +output = ["text"] diff --git a/providers/databricks/models/databricks-gpt-5-4-nano.toml b/providers/databricks/models/databricks-gpt-5-4-nano.toml new file mode 100644 index 000000000..3f8f5aefd --- /dev/null +++ b/providers/databricks/models/databricks-gpt-5-4-nano.toml @@ -0,0 +1,17 @@ +name = "Databricks GPT-5.4 Nano" +family = "gpt-nano" +release_date = "2025-06-01" +last_updated = "2026-04-11" +attachment = true +reasoning = true +tool_call = true +temperature = false +open_weights = false + +[limit] +context = 400000 +output = 128000 + +[modalities] +input = ["text", "image"] +output = ["text"] diff --git a/providers/databricks/models/databricks-gpt-5-4.toml b/providers/databricks/models/databricks-gpt-5-4.toml new file mode 100644 index 
000000000..99c4f3970 --- /dev/null +++ b/providers/databricks/models/databricks-gpt-5-4.toml @@ -0,0 +1,17 @@ +name = "Databricks GPT-5.4" +family = "gpt" +release_date = "2025-06-01" +last_updated = "2026-04-11" +attachment = true +reasoning = true +tool_call = true +temperature = false +open_weights = false + +[limit] +context = 400000 +output = 128000 + +[modalities] +input = ["text", "image"] +output = ["text"] diff --git a/providers/databricks/models/databricks-gpt-5-mini.toml b/providers/databricks/models/databricks-gpt-5-mini.toml new file mode 100644 index 000000000..5bdbc581a --- /dev/null +++ b/providers/databricks/models/databricks-gpt-5-mini.toml @@ -0,0 +1,17 @@ +name = "Databricks GPT-5 Mini" +family = "gpt-mini" +release_date = "2025-06-01" +last_updated = "2026-04-11" +attachment = true +reasoning = true +tool_call = true +temperature = false +open_weights = false + +[limit] +context = 400000 +output = 128000 + +[modalities] +input = ["text", "image"] +output = ["text"] diff --git a/providers/databricks/models/databricks-gpt-5-nano.toml b/providers/databricks/models/databricks-gpt-5-nano.toml new file mode 100644 index 000000000..d9c3537ec --- /dev/null +++ b/providers/databricks/models/databricks-gpt-5-nano.toml @@ -0,0 +1,17 @@ +name = "Databricks GPT-5 Nano" +family = "gpt-nano" +release_date = "2025-06-01" +last_updated = "2026-04-11" +attachment = true +reasoning = true +tool_call = true +temperature = false +open_weights = false + +[limit] +context = 400000 +output = 128000 + +[modalities] +input = ["text", "image"] +output = ["text"] diff --git a/providers/databricks/models/databricks-gpt-5.toml b/providers/databricks/models/databricks-gpt-5.toml new file mode 100644 index 000000000..561ae3922 --- /dev/null +++ b/providers/databricks/models/databricks-gpt-5.toml @@ -0,0 +1,17 @@ +name = "Databricks GPT-5" +family = "gpt" +release_date = "2025-06-01" +last_updated = "2026-04-11" +attachment = true +reasoning = true +tool_call = true 
+temperature = false +open_weights = false + +[limit] +context = 400000 +output = 128000 + +[modalities] +input = ["text", "image"] +output = ["text"] diff --git a/providers/databricks/models/databricks-gpt-oss-120b.toml b/providers/databricks/models/databricks-gpt-oss-120b.toml new file mode 100644 index 000000000..05eaf69b9 --- /dev/null +++ b/providers/databricks/models/databricks-gpt-oss-120b.toml @@ -0,0 +1,17 @@ +name = "Databricks GPT-OSS 120B" +family = "gpt-oss" +release_date = "2025-06-01" +last_updated = "2026-04-11" +attachment = false +reasoning = true +tool_call = true +temperature = true +open_weights = true + +[limit] +context = 128000 +output = 16384 + +[modalities] +input = ["text"] +output = ["text"] diff --git a/providers/databricks/models/databricks-gpt-oss-20b.toml b/providers/databricks/models/databricks-gpt-oss-20b.toml new file mode 100644 index 000000000..b8bcc794f --- /dev/null +++ b/providers/databricks/models/databricks-gpt-oss-20b.toml @@ -0,0 +1,17 @@ +name = "Databricks GPT-OSS 20B" +family = "gpt-oss" +release_date = "2025-06-01" +last_updated = "2026-04-11" +attachment = false +reasoning = true +tool_call = true +temperature = true +open_weights = true + +[limit] +context = 128000 +output = 16384 + +[modalities] +input = ["text"] +output = ["text"] diff --git a/providers/databricks/models/databricks-gte-large-en.toml b/providers/databricks/models/databricks-gte-large-en.toml new file mode 100644 index 000000000..cb18a4965 --- /dev/null +++ b/providers/databricks/models/databricks-gte-large-en.toml @@ -0,0 +1,17 @@ +name = "Databricks GTE Large English" +family = "text-embedding" +release_date = "2024-09-01" +last_updated = "2026-04-11" +attachment = false +reasoning = false +tool_call = false +temperature = false +open_weights = true + +[limit] +context = 8192 +output = 1024 + +[modalities] +input = ["text"] +output = ["text"] diff --git a/providers/databricks/models/databricks-llama-4-maverick.toml 
b/providers/databricks/models/databricks-llama-4-maverick.toml new file mode 100644 index 000000000..e857264d8 --- /dev/null +++ b/providers/databricks/models/databricks-llama-4-maverick.toml @@ -0,0 +1,17 @@ +name = "Databricks Llama 4 Maverick" +family = "llama" +release_date = "2025-06-01" +last_updated = "2026-04-11" +attachment = true +reasoning = false # NOTE(review): Maverick is an instruct model, not a reasoning model — confirm against the serving endpoint +tool_call = true +temperature = true +open_weights = true + +[limit] +context = 128000 +output = 16384 + +[modalities] +input = ["text", "image"] +output = ["text"] diff --git a/providers/databricks/models/databricks-meta-llama-3-1-8b-instruct.toml b/providers/databricks/models/databricks-meta-llama-3-1-8b-instruct.toml new file mode 100644 index 000000000..52a7b12e4 --- /dev/null +++ b/providers/databricks/models/databricks-meta-llama-3-1-8b-instruct.toml @@ -0,0 +1,17 @@ +name = "Databricks Meta Llama 3.1 8B Instruct" +family = "llama" +release_date = "2024-07-01" +last_updated = "2026-04-11" +attachment = false +reasoning = false +tool_call = true +temperature = true +open_weights = true + +[limit] +context = 131072 +output = 8192 + +[modalities] +input = ["text"] +output = ["text"] diff --git a/providers/databricks/models/databricks-qwen3-embedding-0-6b.toml b/providers/databricks/models/databricks-qwen3-embedding-0-6b.toml new file mode 100644 index 000000000..9dac76171 ---
/dev/null +++ b/providers/databricks/models/databricks-qwen3-embedding-0-6b.toml @@ -0,0 +1,17 @@ +name = "Databricks Qwen3 Embedding 0.6B" +family = "text-embedding" +release_date = "2025-06-01" +last_updated = "2026-04-11" +attachment = false +reasoning = false +tool_call = false +temperature = false +open_weights = true + +[limit] +context = 32768 +output = 4096 + +[modalities] +input = ["text"] +output = ["text"] diff --git a/providers/databricks/models/databricks-qwen3-next-80b-a3b-instruct.toml b/providers/databricks/models/databricks-qwen3-next-80b-a3b-instruct.toml new file mode 100644 index 000000000..c2f73e7eb --- /dev/null +++ b/providers/databricks/models/databricks-qwen3-next-80b-a3b-instruct.toml @@ -0,0 +1,17 @@ +name = "Databricks Qwen3 Next 80B A3B Instruct" +family = "qwen" +release_date = "2025-06-01" +last_updated = "2026-04-11" +attachment = false +reasoning = true # NOTE(review): Instruct is the non-thinking Qwen3-Next variant — confirm reasoning support on the endpoint +tool_call = true +temperature = true +open_weights = true # Qwen3-Next weights are publicly released (Apache-2.0) + +[limit] +context = 256000 +output = 32768 + +[modalities] +input = ["text"] +output = ["text"] diff --git a/providers/databricks/provider.toml b/providers/databricks/provider.toml new file mode 100644 index 000000000..713735eb6 --- /dev/null +++ b/providers/databricks/provider.toml @@ -0,0 +1,13 @@ +name = "Databricks" +# Environment hints; use the subset that matches your auth mode. Integrators substitute the +# workspace host into `api` (its subdomain is deliberately blank) or set baseURL after discovery — see README. +env = [ + "DATABRICKS_HOST", + "DATABRICKS_WORKSPACE_ID", + "DATABRICKS_TOKEN", + "DATABRICKS_CLIENT_ID", + "DATABRICKS_CLIENT_SECRET", +] +npm = "@databricks/ai-sdk-provider" +api = "https://.ai-gateway.cloud.databricks.com/mlflow/v1" +doc = "https://docs.databricks.com/en/machine-learning/foundation-models/api-reference.html"