Closed

Changes from all commits (18 commits)
4d76382
chore: harden GitHub Actions supply chain security (#1365)
tejassp-db Mar 28, 2026
2217be8
fix: replace npm dependency with bash regex in PR title linting (#1375)
tejassp-db Mar 30, 2026
2f11abb
chore: migrate to hardened GHA runners and add JFrog PyPI proxy (#1384)
tejassp-db Apr 8, 2026
b0778e0
chore: add dependency cache for fork PR CI support (#1386)
sd-db Apr 11, 2026
5194f6c
fix: warm cache pre-commit and uv cache path bugs (#1389)
sd-db Apr 13, 2026
40bc0d6
fix: dependency cache bugs for fork PR CI (#1394)
sd-db Apr 13, 2026
ea6a384
chore: restrict dependabot to security updates only (#1395)
sd-db Apr 13, 2026
54e8040
fix: capability detection for named compute (`databricks_compute`) (#…
trouze Apr 14, 2026
e2a85e6
feat: Add Notebook-scoped packages for command submits or notebook jo…
fedemgp Apr 14, 2026
5927a32
fix: convert workflow job spec to SDK dataclasses before jobs.create(…
aarushisingh04 Apr 14, 2026
8611c72
fix: enable TestWorkflowJob functional test that was unreachable on a…
sd-db Apr 15, 2026
3d3a87d
fix: prevent duplicate alias in --empty mode for inline ref/source (#…
sd-db Apr 16, 2026
b46d987
chore: bump dbt-core upper bound to <1.11.9 (#1400)
sd-db Apr 16, 2026
482c5f8
feat(PECOBLR-2328): bump databricks-sql-connector upper bound to <4.1…
tejassp-db Apr 16, 2026
b1047b5
chore: Prep for 1.11.7 Release (#1405)
sd-db Apr 16, 2026
1c22d17
chore: sync upstream v1.11.7
alexeyegorov Apr 21, 2026
95d6f3c
chore: update last upstream sync tag to v1.11.7
alexeyegorov Apr 21, 2026
86a2e68
ci: remove Databricks-internal JFrog PyPI proxy and protected runners
alexeyegorov Apr 21, 2026
5 changes: 4 additions & 1 deletion .github/CODEOWNERS
@@ -2,4 +2,7 @@
# the repo. Unless a later match takes precedence, these
# users will be requested for review when someone opens a
# pull request.
* @sd-db @tejassp-db @benc-db
* @sd-db @tejassp-db @benc-db @jprakash-db

# Explicit rule for CI/CD workflow changes
/.github/workflows/ @sd-db @tejassp-db @benc-db @jprakash-db
8 changes: 0 additions & 8 deletions .github/ISSUE_TEMPLATE/dependabot.yml

This file was deleted.

48 changes: 48 additions & 0 deletions .github/actions/setup-jfrog-pypi/action.yml
@@ -0,0 +1,48 @@
name: "Setup JFrog PyPI Proxy"
description: "Authenticate with JFrog via OIDC and configure uv to use JFrog as PyPI proxy"

runs:
using: "composite"
steps:
- name: Get JFrog OIDC token
shell: bash
run: |
set -euo pipefail

# Get GitHub OIDC ID token
ID_TOKEN=$(curl -sLS \
-H "User-Agent: actions/oidc-client" \
-H "Authorization: Bearer $ACTIONS_ID_TOKEN_REQUEST_TOKEN" \
"${ACTIONS_ID_TOKEN_REQUEST_URL}&audience=jfrog-github" | jq .value | tr -d '"')
echo "::add-mask::${ID_TOKEN}"

# Exchange for JFrog access token
ACCESS_TOKEN=$(curl -sLS -XPOST -H "Content-Type: application/json" \
"https://databricks.jfrog.io/access/api/v1/oidc/token" \
-d "{\"grant_type\": \"urn:ietf:params:oauth:grant-type:token-exchange\", \"subject_token_type\":\"urn:ietf:params:oauth:token-type:id_token\", \"subject_token\": \"${ID_TOKEN}\", \"provider_name\": \"github-actions\"}" | jq .access_token | tr -d '"')
echo "::add-mask::${ACCESS_TOKEN}"

if [ -z "$ACCESS_TOKEN" ] || [ "$ACCESS_TOKEN" = "null" ]; then
echo "FAIL: Could not extract JFrog access token"
exit 1
fi

echo "JFROG_ACCESS_TOKEN=${ACCESS_TOKEN}" >> "$GITHUB_ENV"
echo "JFrog OIDC token obtained successfully"

- name: Configure pip and uv to use JFrog PyPI proxy
shell: bash
run: |
set -euo pipefail
JFROG_PYPI_URL="https://gha-service-account:${JFROG_ACCESS_TOKEN}@databricks.jfrog.io/artifactory/api/pypi/db-pypi/simple"
echo "PIP_INDEX_URL=${JFROG_PYPI_URL}" >> "$GITHUB_ENV"
echo "UV_INDEX_URL=${JFROG_PYPI_URL}" >> "$GITHUB_ENV"

# Write pip.conf so subprocesses (hatch, pre-commit, virtualenv) also use JFrog
mkdir -p ~/.config/pip
cat > ~/.config/pip/pip.conf << EOF
[global]
index-url = ${JFROG_PYPI_URL}
EOF

echo "pip and uv configured to use JFrog registry"
39 changes: 39 additions & 0 deletions .github/actions/setup-python-deps/action.yml
@@ -0,0 +1,39 @@
name: "Setup Python Dependencies"
description: |
Restores pre-cached Python dependencies and enables offline mode.
Outputs cache-hit so callers can fall back to setup-jfrog-pypi on miss.

outputs:
cache-hit:
description: "Whether the dependency cache was restored and offline mode enabled"
value: ${{ steps.uv-cache.outputs.cache-matched-key != '' }}

runs:
using: "composite"
steps:
- name: Restore uv and pip cache
id: uv-cache
uses: actions/cache/restore@0057852bfaa89a56745cba8c7296529d2fc39830 # v4
with:
path: |
~/.cache/uv
~/.cache/pip
~/.cache/pip-wheelhouse
key: python-deps-${{ hashFiles('uv.lock', 'pyproject.toml') }}-latest
restore-keys: python-deps-${{ hashFiles('uv.lock', 'pyproject.toml') }}-

- name: Restore pre-commit cache
uses: actions/cache/restore@0057852bfaa89a56745cba8c7296529d2fc39830 # v4
with:
path: ~/.cache/pre-commit
key: pre-commit-deps-${{ hashFiles('.pre-commit-config.yaml') }}-latest
restore-keys: pre-commit-deps-${{ hashFiles('.pre-commit-config.yaml') }}-

- name: Enable offline mode
if: steps.uv-cache.outputs.cache-matched-key != ''
shell: bash
run: |
echo "UV_OFFLINE=true" >> "$GITHUB_ENV"
echo "UV_INDEX_URL=https://databricks.jfrog.io/artifactory/api/pypi/db-pypi/simple" >> "$GITHUB_ENV"
echo "PIP_NO_INDEX=1" >> "$GITHUB_ENV"
echo "PIP_FIND_LINKS=$HOME/.cache/pip-wheelhouse" >> "$GITHUB_ENV"
17 changes: 17 additions & 0 deletions .github/dependabot.yml
@@ -0,0 +1,17 @@
version: 2
updates:
# Python dependencies — security updates only
- package-ecosystem: "pip"
directory: "/"
schedule:
interval: "weekly"
open-pull-requests-limit: 0
rebase-strategy: "disabled"

# GitHub Actions — security updates only
- package-ecosystem: "github-actions"
directory: "/"
schedule:
interval: "weekly"
open-pull-requests-limit: 0
rebase-strategy: "disabled"
2 changes: 1 addition & 1 deletion .github/last-upstream-sync-tag
@@ -1 +1 @@
v1.11.6
v1.11.7
31 changes: 10 additions & 21 deletions .github/workflows/ci-pr-linting.yml
@@ -15,34 +15,23 @@ jobs:
pr-title:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4

- name: Setup node
uses: actions/setup-node@v4
with:
node-version: 20
- name: Install conventional commit parser
shell: bash
run: npm install --global conventional-commits-parser

- name: Validate PR title
id: pr-format
shell: bash
env:
PR_TITLE: ${{ github.event.pull_request.title }}
# language=bash
run: |
echo "PR title: ${PR_TITLE}"
# check if PR title follows conventional commits format
# issue on parser does not support "!" for breaking change (https://github.com/conventional-changelog/conventional-changelog/issues/648)
# so we override the regex to support it
conventionalCommitResult=$(echo "${PR_TITLE}" | conventional-commits-parser -p "^(\w*)!?(?:\(([\w\$\.\-\* ]*)\))?\: (.*)$" | jq ".[].type")
if [[ "${conventionalCommitResult}" != "null" ]]; then
echo "Conventional commit type: ${conventionalCommitResult}"

# Validate PR title follows conventional commits format
# Pattern: type[!][(scope)]: description
# Examples: feat(JIRA-123): add feature, fix!: breaking change
REGEX='^[a-zA-Z]+!?(\([^)]*\))?\: .+'
if [[ "${PR_TITLE}" =~ $REGEX ]]; then
echo "Valid conventional commit format"
exit 0
fi

echo "Invalid PR title"
exit 1

@@ -54,7 +43,7 @@ jobs:
message: |
Hey there and thank you for opening this pull request! :wave:
We require pull request titles to follow the [Conventional Commits specification](https://www.conventionalcommits.org/en/v1.0.0/).

Examples:
- `feat(JIRA-123): My awesome feature`
- `fix: My awesome fix`
@@ -65,4 +54,4 @@
if: success()
with:
header: pr-title-lint-error
delete: true
delete: true
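The bash pattern above replaces the conventional-commits-parser npm dependency. A rough local check of the same regex is sketched below; the script is illustrative and not part of the workflow, and the sample titles echo the comment template.

#!/usr/bin/env bash
# Illustrative local check of the PR-title pattern; not part of this PR.
REGEX='^[a-zA-Z]+!?(\([^)]*\))?\: .+'
for title in \
  "feat(JIRA-123): My awesome feature" \
  "fix: My awesome fix" \
  "My awesome fix"; do
  if [[ "$title" =~ $REGEX ]]; then
    echo "valid:   $title"
  else
    echo "invalid: $title"
  fi
done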
2 changes: 1 addition & 1 deletion .github/workflows/coverage.yml
@@ -27,7 +27,7 @@ jobs:
# DO NOT run actions/checkout here, for security reasons
# For details, refer to https://securitylab.github.com/research/github-actions-preventing-pwn-requests/
- name: Post comment
uses: py-cov-action/python-coverage-comment-action@v3
uses: py-cov-action/python-coverage-comment-action@7188638f871f721a365d644f505d1ff3df20d683 # v3
with:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
GITHUB_PR_RUN_ID: ${{ github.event.workflow_run.id }}
36 changes: 21 additions & 15 deletions .github/workflows/integration.yml
@@ -25,6 +25,9 @@ on:
required: false
type: string

permissions:
contents: read

concurrency:
group: ${{ github.workflow }}-${{ github.ref }}
cancel-in-progress: true
@@ -41,9 +44,10 @@ jobs:
DBT_DATABRICKS_UC_INITIAL_CATALOG: peco
DBT_DATABRICKS_LOCATION_ROOT: ${{ secrets.TEST_PECO_EXTERNAL_LOCATION }}test
TEST_PECO_UC_CLUSTER_ID: ${{ secrets.TEST_PECO_UC_CLUSTER_ID }}
UV_FROZEN: "1"
steps:
- name: Check out repository
uses: actions/checkout@v4
uses: actions/checkout@34e114876b0b11c390a56381ad16ebd13914f8d5 # v4
with:
# For pull_request: checkout the PR head commit
# For workflow_dispatch with pr_number: checkout that PR's head
@@ -55,7 +59,7 @@

- name: Set up python
id: setup-python
uses: actions/setup-python@v5
uses: actions/setup-python@a26af69be951a213d495a4c3e4e4022e16d87065 # v5
with:
python-version: "3.10"

@@ -64,18 +68,18 @@
shell: sh

- name: Install uv
uses: astral-sh/setup-uv@v4
uses: astral-sh/setup-uv@38f3f104447c67c051c4a08e39b64a148898af3a # v4

- name: Install Hatch
id: install-dependencies
uses: pypa/hatch@install
uses: pypa/hatch@257e27e51a6a5616ed08a39a408a21c35c9931bc # install

- name: Run UC Cluster Functional Tests
run: DBT_TEST_USER=notnecessaryformosttests@example.com DBT_DATABRICKS_LOCATION_ROOT=$DBT_DATABRICKS_LOCATION_ROOT DBT_DATABRICKS_HOST_NAME=$DBT_DATABRICKS_HOST_NAME DBT_DATABRICKS_UC_CLUSTER_HTTP_PATH=$DBT_DATABRICKS_UC_CLUSTER_HTTP_PATH DBT_DATABRICKS_CLIENT_ID=$DBT_DATABRICKS_CLIENT_ID DBT_DATABRICKS_CLIENT_SECRET=$DBT_DATABRICKS_CLIENT_SECRET hatch -v run uc-cluster-e2e

- name: Upload UC Cluster Test Logs
if: always()
uses: actions/upload-artifact@v4
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4
with:
name: uc-cluster-test-logs
path: logs/
@@ -93,9 +97,10 @@ jobs:
DBT_DATABRICKS_UC_INITIAL_CATALOG: peco
DBT_DATABRICKS_LOCATION_ROOT: ${{ secrets.TEST_PECO_EXTERNAL_LOCATION }}test
TEST_PECO_UC_CLUSTER_ID: ${{ secrets.TEST_PECO_UC_CLUSTER_ID }}
UV_FROZEN: "1"
steps:
- name: Check out repository
uses: actions/checkout@v4
uses: actions/checkout@34e114876b0b11c390a56381ad16ebd13914f8d5 # v4
with:
# For pull_request: checkout the PR head commit
# For workflow_dispatch with pr_number: checkout that PR's head
@@ -107,7 +112,7 @@

- name: Set up python
id: setup-python
uses: actions/setup-python@v5
uses: actions/setup-python@a26af69be951a213d495a4c3e4e4022e16d87065 # v5
with:
python-version: "3.10"

@@ -116,18 +121,18 @@
shell: sh

- name: Install uv
uses: astral-sh/setup-uv@v4
uses: astral-sh/setup-uv@38f3f104447c67c051c4a08e39b64a148898af3a # v4

- name: Install Hatch
id: install-dependencies
uses: pypa/hatch@install
uses: pypa/hatch@257e27e51a6a5616ed08a39a408a21c35c9931bc # install

- name: Run Sql Endpoint Functional Tests
run: DBT_TEST_USER=notnecessaryformosttests@example.com DBT_DATABRICKS_LOCATION_ROOT=$DBT_DATABRICKS_LOCATION_ROOT DBT_DATABRICKS_HOST_NAME=$DBT_DATABRICKS_HOST_NAME DBT_DATABRICKS_UC_CLUSTER_HTTP_PATH=$DBT_DATABRICKS_UC_CLUSTER_HTTP_PATH DBT_DATABRICKS_CLIENT_ID=$DBT_DATABRICKS_CLIENT_ID DBT_DATABRICKS_CLIENT_SECRET=$DBT_DATABRICKS_CLIENT_SECRET hatch -v run sqlw-e2e

- name: Upload SQL Endpoint Test Logs
if: always()
uses: actions/upload-artifact@v4
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4
with:
name: sql-endpoint-test-logs
path: logs/
@@ -143,9 +148,10 @@ jobs:
DBT_DATABRICKS_CLIENT_SECRET: ${{ secrets.TEST_PECO_SP_SECRET }}
TEST_PECO_CLUSTER_ID: ${{ secrets.TEST_PECO_CLUSTER_ID }}
DBT_DATABRICKS_LOCATION_ROOT: ${{ secrets.TEST_PECO_EXTERNAL_LOCATION }}test
UV_FROZEN: "1"
steps:
- name: Check out repository
uses: actions/checkout@v4
uses: actions/checkout@34e114876b0b11c390a56381ad16ebd13914f8d5 # v4
with:
# For pull_request: checkout the PR head commit
# For workflow_dispatch with pr_number: checkout that PR's head
@@ -157,7 +163,7 @@

- name: Set up python
id: setup-python
uses: actions/setup-python@v5
uses: actions/setup-python@a26af69be951a213d495a4c3e4e4022e16d87065 # v5
with:
python-version: "3.10"

@@ -166,18 +172,18 @@
shell: sh

- name: Install uv
uses: astral-sh/setup-uv@v4
uses: astral-sh/setup-uv@38f3f104447c67c051c4a08e39b64a148898af3a # v4

- name: Install Hatch
id: install-dependencies
uses: pypa/hatch@install
uses: pypa/hatch@257e27e51a6a5616ed08a39a408a21c35c9931bc # install

- name: Run Cluster Functional Tests
run: DBT_TEST_USER=notnecessaryformosttests@example.com DBT_DATABRICKS_LOCATION_ROOT=$DBT_DATABRICKS_LOCATION_ROOT DBT_DATABRICKS_HOST_NAME=$DBT_DATABRICKS_HOST_NAME DBT_DATABRICKS_HTTP_PATH=$DBT_DATABRICKS_CLUSTER_HTTP_PATH DBT_DATABRICKS_CLIENT_ID=$DBT_DATABRICKS_CLIENT_ID DBT_DATABRICKS_CLIENT_SECRET=$DBT_DATABRICKS_CLIENT_SECRET hatch -v run cluster-e2e

- name: Upload Cluster Test Logs
if: always()
uses: actions/upload-artifact@v4
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4
with:
name: cluster-test-logs
path: logs/