Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
Show all changes
20 commits
Select commit Hold shift + click to select a range
8f0f7c3
Fix cache warmer: Linux-only, OS-agnostic cache key
gopalldb Apr 8, 2026
2344880
Merge branch 'databricks:main' into main
gopalldb Apr 8, 2026
309cd68
Merge branch 'databricks:main' into main
gopalldb Apr 8, 2026
1534d1c
Merge branch 'databricks:main' into main
gopalldb Apr 8, 2026
b28bbc6
Merge branch 'databricks:main' into main
gopalldb Apr 8, 2026
522ca76
Merge branch 'databricks:main' into main
gopalldb Apr 8, 2026
3b9a5c2
Merge branch 'databricks:main' into main
gopalldb Apr 8, 2026
621eea0
Merge branch 'databricks:main' into main
gopalldb Apr 8, 2026
122a967
Merge branch 'databricks:main' into main
gopalldb Apr 8, 2026
2377e63
Merge branch 'databricks:main' into main
gopalldb Apr 8, 2026
d3559f1
Merge branch 'databricks:main' into main
gopalldb Apr 9, 2026
b84d320
Merge branch 'databricks:main' into main
gopalldb Apr 9, 2026
72636bf
Merge branch 'databricks:main' into main
gopalldb Apr 10, 2026
f4974b5
Merge branch 'databricks:main' into main
gopalldb Apr 20, 2026
6f11980
Merge branch 'databricks:main' into main
gopalldb Apr 21, 2026
419d4ec
Change UseQueryForMetadata default to 1 (SHOW commands)
gopalldb Apr 21, 2026
b5e1023
Merge branch 'main' into feature/use-query-for-metadata-default
gopalldb Apr 21, 2026
8e9ab5b
Change UseQueryForMetadata default to 1 for SQL Warehouses only
gopalldb Apr 21, 2026
681a81d
Expand SEA to Statement Execution API in changelog
gopalldb Apr 21, 2026
9490337
Fix integration tests: set UseQueryForMetadata=0 for fake service
gopalldb Apr 21, 2026
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
6 changes: 6 additions & 0 deletions NEXT_CHANGELOG.md
Original file line number Diff line number Diff line change
Expand Up @@ -7,6 +7,12 @@
- Added AI coding agent detection to the User-Agent header. When the driver is invoked by a known AI coding agent (e.g. Claude Code, Cursor, Gemini CLI), `agent/<product>` is appended to the User-Agent string.

### Updated
- **[Breaking Change]** Thrift-mode metadata operations (`getTables`, `getColumns`, `getSchemas`, `getFunctions`, `getPrimaryKeys`, `getImportedKeys`, `getCrossReference`) on **SQL Warehouses** now use SQL SHOW commands by default instead of native Thrift RPCs, aligning behavior with Statement Execution API (SEA) mode. All-Purpose Clusters are unaffected and continue using native Thrift RPCs. The `UseQueryForMetadata` connection property default changed from `0` to `1`. To revert to native Thrift RPCs, set `UseQueryForMetadata=0`. Key behavioral changes:
- Catalog parameter is now treated as a literal identifier (not a wildcard pattern) per JDBC spec. Use `null` to search across all catalogs.
- Methods that previously threw exceptions for null/empty edge-case inputs now return empty result sets.
- `getFunctions` now works correctly (was broken via native Thrift RPC).
- Result columns (TABLE_CATALOG, etc.) return stored values (lowercase) instead of preserving input case.
- Connection properties `EnableShowCommandForGetFunctions` and `TreatMetadataCatalogNameAsPattern` are now redundant when `UseQueryForMetadata=1` (the new default).

### Fixed
- Fixed `EnableBatchedInserts` silently falling back to individual execution when table or schema names contain special characters (e.g., hyphens) inside backtick-quoted identifiers. Added a warn log when the fallback occurs.
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -1101,7 +1101,14 @@ public boolean enableShowCommandsForGetFunctions() {

@Override
public boolean useQueryForMetadata() {
  // If the user explicitly set the property, honour their choice for any compute type.
  String userValue = getParameterIgnoreDefault(DatabricksJdbcUrlParams.USE_QUERY_FOR_METADATA);
  if (userValue != null) {
    return userValue.equals("1");
  }
  // No explicit value: default to SHOW commands for SQL Warehouses only.
  // All-Purpose Clusters keep the legacy behaviour (native Thrift RPCs),
  // hence the instanceof check on the compute resource.
  return !(computeResource instanceof AllPurposeCluster);
}

@Override
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -173,7 +173,7 @@ public enum DatabricksJdbcUrlParams {
USE_QUERY_FOR_METADATA(
"UseQueryForMetadata",
"Use SQL SHOW commands instead of Thrift RPCs for metadata operations. When enabled, EnableShowCommandForGetFunctions is redundant",
"0"),
"1"),
TREAT_METADATA_CATALOG_NAME_AS_PATTERN(
"TreatMetadataCatalogNameAsPattern",
"Treat catalog names as patterns in Thrift metadata RPCs. When disabled (default), wildcard characters in catalog names are escaped",
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -1453,4 +1453,38 @@ public void testDefaultGetterCoverage() throws DatabricksSQLException {
assertFalse(ctx.enableShowCommandsForGetFunctions());
assertFalse(ctx.treatMetadataCatalogNameAsPattern());
}

@Test
public void testUseQueryForMetadataDefaultTrueForWarehouse() throws DatabricksSQLException {
  // A warehouse URL with no explicit UseQueryForMetadata must default to
  // SHOW-command metadata, i.e. useQueryForMetadata() == true.
  IDatabricksConnectionContext warehouseContext =
      DatabricksConnectionContext.parse(TestConstants.VALID_URL_1, properties);
  assertTrue(warehouseContext.useQueryForMetadata());
}

@Test
public void testUseQueryForMetadataDefaultFalseForCluster() throws DatabricksSQLException {
  // A cluster URL with no explicit UseQueryForMetadata must default to
  // native Thrift RPCs, i.e. useQueryForMetadata() == false.
  IDatabricksConnectionContext clusterContext =
      DatabricksConnectionContext.parse(TestConstants.VALID_CLUSTER_URL, properties);
  assertFalse(clusterContext.useQueryForMetadata());
}

@Test
public void testUseQueryForMetadataExplicitTrueOnCluster() throws DatabricksSQLException {
  // An explicit UseQueryForMetadata=1 must override the cluster default (false).
  String urlWithOverride = TestConstants.VALID_CLUSTER_URL + ";UseQueryForMetadata=1";
  IDatabricksConnectionContext clusterContext =
      DatabricksConnectionContext.parse(urlWithOverride, properties);
  assertTrue(clusterContext.useQueryForMetadata());
}

@Test
public void testUseQueryForMetadataExplicitFalseOnWarehouse() throws DatabricksSQLException {
  // An explicit UseQueryForMetadata=0 must override the warehouse default (true).
  String urlWithOverride = TestConstants.VALID_URL_1 + ";UseQueryForMetadata=0";
  IDatabricksConnectionContext warehouseContext =
      DatabricksConnectionContext.parse(urlWithOverride, properties);
  assertFalse(warehouseContext.useQueryForMetadata());
}
}
Original file line number Diff line number Diff line change
Expand Up @@ -75,7 +75,8 @@ public void testOpenAndCloseSession() throws SQLException {
session.open();
assertTrue(session.isOpen());
assertEquals(SESSION_ID, session.getSessionId());
assertInstanceOf(DatabricksThriftServiceClient.class, session.getDatabricksMetadataClient());
// Warehouses use DatabricksMetadataQueryClient (SHOW commands) by default
assertInstanceOf(DatabricksMetadataQueryClient.class, session.getDatabricksMetadataClient());
assertEquals(WAREHOUSE_COMPUTE, session.getComputeResource());
session.close();
assertFalse(session.isOpen());
Expand Down Expand Up @@ -110,7 +111,10 @@ public void testOpenRedirectedThriftSession() throws SQLException {
assertEquals(SESSION_ID, session.getSessionId());
assertEquals(DatabricksClientType.THRIFT, connectionContext.getClientType());
assertInstanceOf(DatabricksThriftServiceClient.class, session.getDatabricksClient());
assertInstanceOf(DatabricksThriftServiceClient.class, session.getDatabricksMetadataClient());
// After redirect, the createProxy mock returns thriftClient for all proxy calls.
// In production, useQueryForMetadata=1 (warehouse default) would create a
// DatabricksMetadataQueryClient. Here the mock collapses it.
assertNotNull(session.getDatabricksMetadataClient());
assertEquals(WAREHOUSE_COMPUTE, session.getComputeResource());

session.close();
Expand All @@ -136,7 +140,8 @@ public void testOpenAndCloseSessionUsingThrift() throws SQLException {
assertTrue(session.isOpen());
assertEquals(SESSION_ID, session.getSessionId());
assertEquals(tSessionHandle, session.getSessionInfo().sessionHandle());
assertEquals(thriftClient, session.getDatabricksMetadataClient());
// Warehouses use DatabricksMetadataQueryClient (SHOW commands) by default
assertInstanceOf(DatabricksMetadataQueryClient.class, session.getDatabricksMetadataClient());
assertEquals(WAREHOUSE_COMPUTE, session.getComputeResource());
session.close();
assertFalse(session.isOpen());
Expand Down Expand Up @@ -315,14 +320,25 @@ static void setupWarehouseWithQueryMetadata() throws SQLException {
}

@Test
public void testUseQueryForMetadataDisabledByDefault() throws SQLException {
public void testUseQueryForMetadataEnabledByDefaultForWarehouse() throws SQLException {
setupWarehouse(true /* useThrift */);
DatabricksSession session = new DatabricksSession(connectionContext, thriftClient);
assertFalse(connectionContext.useQueryForMetadata());
assertTrue(connectionContext.useQueryForMetadata());
assertInstanceOf(
DatabricksThriftServiceClient.class,
DatabricksMetadataQueryClient.class,
session.getDatabricksMetadataClient(),
"Warehouses should use SHOW commands (DatabricksMetadataQueryClient) by default");
}

@Test
public void testUseQueryForMetadataDisabledByDefaultForCluster() throws SQLException {
connectionContext = DatabricksConnectionContext.parse(VALID_CLUSTER_URL, new Properties());
DatabricksSession session = new DatabricksSession(connectionContext, thriftClient);
assertFalse(connectionContext.useQueryForMetadata());
assertEquals(
thriftClient,
session.getDatabricksMetadataClient(),
"When UseQueryForMetadata is default (0), metadata client should be the Thrift client");
"Clusters should use native Thrift RPCs by default");
}

@Test
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -32,7 +32,11 @@ void testGetSchemasWithNullCatalogMultipleCatalogSupportEnabled() throws SQLExce
DatabricksJdbcUrlParams.USE_THRIFT_CLIENT.getParamName(),
FakeServiceConfigLoader.shouldUseThriftClient());

String jdbcUrl = getFakeServiceBenchfoodJDBCUrl() + ";enableMultipleCatalogSupport=1";
// UseQueryForMetadata=0: Thrift fake service uses binary protocol stubs with
// exact session handle matching. SHOW command stubs can't be recorded reliably
// because session handles change between recording and replay.
String jdbcUrl =
getFakeServiceBenchfoodJDBCUrl() + ";enableMultipleCatalogSupport=1;UseQueryForMetadata=0";

try (Connection connectionWithMultiCatalog = DriverManager.getConnection(jdbcUrl, props)) {
DatabaseMetaData metaData = connectionWithMultiCatalog.getMetaData();
Expand Down Expand Up @@ -74,7 +78,9 @@ void testGetSchemasWithNullCatalogMultipleCatalogSupportDisabled() throws SQLExc
DatabricksJdbcUrlParams.USE_THRIFT_CLIENT.getParamName(),
FakeServiceConfigLoader.shouldUseThriftClient());

String jdbcUrl = getFakeServiceBenchfoodJDBCUrl() + ";enableMultipleCatalogSupport=0";
// UseQueryForMetadata=0: same rationale as the enabled test above
String jdbcUrl =
getFakeServiceBenchfoodJDBCUrl() + ";enableMultipleCatalogSupport=0;UseQueryForMetadata=0";

try (Connection connectionWithSingleCatalog = DriverManager.getConnection(jdbcUrl, props)) {
DatabaseMetaData metaData = connectionWithSingleCatalog.getMetaData();
Expand Down
Loading