From e46612a437f1880b4425ff434293b5d461ef2148 Mon Sep 17 00:00:00 2001 From: Madhavendra Rathore Date: Wed, 8 Apr 2026 16:01:54 +0530 Subject: [PATCH 1/7] SEA metadata: throw exceptions for invalid null/empty params matching Thrift parity Change key-based SEA metadata operations (getPrimaryKeys, getImportedKeys, getExportedKeys, getCrossReference) to throw SQLException instead of returning empty ResultSets for invalid parameter combinations like null/empty table or null schema with explicit catalog. This matches Thrift server behavior. Co-authored-by: Isaac Signed-off-by: Madhavendra Rathore --- .../api/impl/DatabricksDatabaseMetaData.java | 6 +- .../DatabricksMetadataQueryClient.java | 82 +++++-------- .../DatabricksMetadataQueryClientTest.java | 104 +++++++++++----- .../tests/MetadataNullResolutionTests.java | 116 +++++++++++++----- 4 files changed, 188 insertions(+), 120 deletions(-) diff --git a/src/main/java/com/databricks/jdbc/api/impl/DatabricksDatabaseMetaData.java b/src/main/java/com/databricks/jdbc/api/impl/DatabricksDatabaseMetaData.java index 2e695274c..430215f2c 100644 --- a/src/main/java/com/databricks/jdbc/api/impl/DatabricksDatabaseMetaData.java +++ b/src/main/java/com/databricks/jdbc/api/impl/DatabricksDatabaseMetaData.java @@ -1132,9 +1132,11 @@ public ResultSet getCrossReference( foreignTable)); throwExceptionIfConnectionIsClosed(); - if (parentTable == null && foreignTable == null) { + boolean parentTableMissing = parentTable == null || parentTable.isEmpty(); + boolean foreignTableMissing = foreignTable == null || foreignTable.isEmpty(); + if (parentTableMissing && foreignTableMissing) { throw new DatabricksSQLException( - "Invalid argument: foreignTable and parentTableName are both null", + "Invalid argument: foreignTable and parentTableName are both null or empty", DatabricksDriverErrorCode.INVALID_STATE); } diff --git a/src/main/java/com/databricks/jdbc/dbclient/impl/sqlexec/DatabricksMetadataQueryClient.java 
b/src/main/java/com/databricks/jdbc/dbclient/impl/sqlexec/DatabricksMetadataQueryClient.java index fe11508b0..8c476f094 100644 --- a/src/main/java/com/databricks/jdbc/dbclient/impl/sqlexec/DatabricksMetadataQueryClient.java +++ b/src/main/java/com/databricks/jdbc/dbclient/impl/sqlexec/DatabricksMetadataQueryClient.java @@ -14,8 +14,10 @@ import com.databricks.jdbc.dbclient.IDatabricksMetadataClient; import com.databricks.jdbc.dbclient.impl.common.CommandConstants; import com.databricks.jdbc.dbclient.impl.common.MetadataResultSetBuilder; +import com.databricks.jdbc.exception.DatabricksSQLException; import com.databricks.jdbc.log.JdbcLogger; import com.databricks.jdbc.log.JdbcLoggerFactory; +import com.databricks.jdbc.model.telemetry.enums.DatabricksDriverErrorCode; import java.sql.ResultSet; import java.sql.ResultSetMetaData; import java.sql.SQLException; @@ -324,19 +326,6 @@ public DatabricksResultSet listPrimaryKeys( catalog = autoFillCatalog(catalog, currentCatalog); String[] resolvedParams = resolveKeyBasedParams(catalog, schema, table, session); - if (resolvedParams == null) { - LOGGER.debug( - "Could not resolve key-based params (catalog={}, schema={}, table={}), returning empty result set for listPrimaryKeys", - catalog, - schema, - table); - return metadataResultSetBuilder.getResultSetWithGivenRowsAndColumns( - PRIMARY_KEYS_COLUMNS, - new ArrayList<>(), - METADATA_STATEMENT_ID, - com.databricks.jdbc.common.CommandName.LIST_PRIMARY_KEYS); - } - String resolvedCatalog = resolvedParams[0]; String resolvedSchema = resolvedParams[1]; String resolvedTable = resolvedParams[2]; @@ -373,19 +362,6 @@ public DatabricksResultSet listImportedKeys( catalog = autoFillCatalog(catalog, currentCatalog); String[] resolvedParams = resolveKeyBasedParams(catalog, schema, table, session); - if (resolvedParams == null) { - LOGGER.debug( - "Could not resolve key-based params (catalog={}, schema={}, table={}), returning empty result set for listImportedKeys", - catalog, - schema, - 
table); - return metadataResultSetBuilder.getResultSetWithGivenRowsAndColumns( - IMPORTED_KEYS_COLUMNS, - new ArrayList<>(), - METADATA_STATEMENT_ID, - com.databricks.jdbc.common.CommandName.GET_IMPORTED_KEYS); - } - String resolvedCatalog = resolvedParams[0]; String resolvedSchema = resolvedParams[1]; String resolvedTable = resolvedParams[2]; @@ -414,6 +390,12 @@ public DatabricksResultSet listExportedKeys( IDatabricksSession session, String catalog, String schema, String table) throws SQLException { LOGGER.debug("public ResultSet listExportedKeys() using SDK"); + if (table == null || table.isEmpty()) { + throw new DatabricksSQLException( + "Invalid argument: tableName may not be null or empty", + DatabricksDriverErrorCode.INVALID_STATE); + } + // Only fetch currentCatalog if multiple catalog support is disabled String currentCatalog = isMultipleCatalogSupportDisabled() ? session.getCurrentCatalog() : null; if (!metadataResultSetBuilder.shouldAllowCatalogAccess(catalog, currentCatalog, session)) { @@ -446,29 +428,12 @@ public DatabricksResultSet listCrossReferences( return metadataResultSetBuilder.getCrossRefsResult(new ArrayList<>()); } - // Resolve null params for the foreign side (used to build the SQL query) + // Resolve null/empty params for the foreign side (used to build the SQL query) String[] resolvedForeignParams = resolveKeyBasedParams(foreignCatalog, foreignSchema, foreignTable, session); - if (resolvedForeignParams == null) { - LOGGER.debug( - "Could not resolve foreign key-based params (catalog={}, schema={}, table={}), returning empty result set", - foreignCatalog, - foreignSchema, - foreignTable); - return metadataResultSetBuilder.getCrossRefsResult(new ArrayList<>()); - } - - // Resolve null params for the parent side (used for filtering results) + // Resolve null/empty params for the parent side (used for filtering results) String[] resolvedParentParams = resolveKeyBasedParams(parentCatalog, parentSchema, parentTable, session); - if 
(resolvedParentParams == null) { - LOGGER.debug( - "Could not resolve parent key-based params (catalog={}, schema={}, table={}), returning empty result set", - parentCatalog, - parentSchema, - parentTable); - return metadataResultSetBuilder.getCrossRefsResult(new ArrayList<>()); - } String resolvedForeignCatalog = resolvedForeignParams[0]; String resolvedForeignSchema = resolvedForeignParams[1]; @@ -542,15 +507,20 @@ private String autoFillCatalog(String catalog, String currentCatalog) { } /** - * Resolves null catalog/schema for key-based metadata operations to match Thrift server behavior. - * When catalog is null, it is replaced with current_catalog and (if schema is also null) schema - * is replaced with current_schema. Returns null if the caller should return an empty result set - * (table is null, schema is null without catalog also being null, or any resolved value is null). + * Validates and resolves null/empty catalog/schema/table for key-based metadata operations to + * match Thrift server behavior. Throws DatabricksSQLException for invalid parameter combinations + * (matching Thrift error behavior). When catalog is null, it is replaced with current_catalog and + * (if schema is also null) schema is replaced with current_schema. 
+ * + * @throws DatabricksSQLException if table is null/empty, or schema is null/empty with an explicit + * catalog */ private String[] resolveKeyBasedParams( String catalog, String schema, String table, IDatabricksSession session) throws SQLException { - if (table == null) { - return null; + if (table == null || table.isEmpty()) { + throw new DatabricksSQLException( + "Invalid argument: tableName may not be null or empty", + DatabricksDriverErrorCode.INVALID_STATE); } if (catalog == null) { @@ -559,12 +529,16 @@ private String[] resolveKeyBasedParams( if (schema == null) { schema = currentCatalogAndSchema[1]; } - } else if (schema == null) { - return null; + } else if (schema == null || schema.isEmpty()) { + throw new DatabricksSQLException( + "Invalid argument: schema may not be null or empty when catalog is specified", + DatabricksDriverErrorCode.INVALID_STATE); } if (catalog == null || schema == null) { - return null; + throw new DatabricksSQLException( + "Invalid argument: could not resolve catalog or schema", + DatabricksDriverErrorCode.INVALID_STATE); } return new String[] {catalog, schema, table}; diff --git a/src/test/java/com/databricks/jdbc/dbclient/impl/sqlexec/DatabricksMetadataQueryClientTest.java b/src/test/java/com/databricks/jdbc/dbclient/impl/sqlexec/DatabricksMetadataQueryClientTest.java index a0935ad20..9080fafd8 100644 --- a/src/test/java/com/databricks/jdbc/dbclient/impl/sqlexec/DatabricksMetadataQueryClientTest.java +++ b/src/test/java/com/databricks/jdbc/dbclient/impl/sqlexec/DatabricksMetadataQueryClientTest.java @@ -764,13 +764,15 @@ void testListCrossReferences() throws Exception { * delegates to getExportedKeys which returns empty in DBSQL. 
*/ @Test - void testListCrossReferences_allForeignParamsNull_returnsEmpty() throws Exception { + void testListCrossReferences_allForeignParamsNull_throws() { DatabricksMetadataQueryClient metadataClient = new DatabricksMetadataQueryClient(mockClient); - DatabricksResultSet result = - metadataClient.listCrossReferences( - session, TEST_CATALOG, TEST_SCHEMA, TEST_TABLE, null, null, null); - assertFalse(result.next(), "Should return empty when all foreign params are null, not throw"); + assertThrows( + DatabricksSQLException.class, + () -> + metadataClient.listCrossReferences( + session, TEST_CATALOG, TEST_SCHEMA, TEST_TABLE, null, null, null), + "Should throw when foreign table is null"); } @Test @@ -943,40 +945,78 @@ void testListFunctionsWithNullCatalog() throws SQLException { } @Test - void testKeyBasedOpsReturnEmptyForNullTable() throws SQLException { + void testKeyBasedOpsThrowForNullTable() { + DatabricksMetadataQueryClient metadataClient = new DatabricksMetadataQueryClient(mockClient); + + assertThrows( + DatabricksSQLException.class, + () -> metadataClient.listPrimaryKeys(session, TEST_CATALOG, TEST_SCHEMA, null), + "listPrimaryKeys should throw for null table"); + + assertThrows( + DatabricksSQLException.class, + () -> metadataClient.listImportedKeys(session, TEST_CATALOG, TEST_SCHEMA, null), + "listImportedKeys should throw for null table"); + } + + @Test + void testKeyBasedOpsThrowForEmptyTable() { + DatabricksMetadataQueryClient metadataClient = new DatabricksMetadataQueryClient(mockClient); + + assertThrows( + DatabricksSQLException.class, + () -> metadataClient.listPrimaryKeys(session, TEST_CATALOG, TEST_SCHEMA, ""), + "listPrimaryKeys should throw for empty table"); + + assertThrows( + DatabricksSQLException.class, + () -> metadataClient.listImportedKeys(session, TEST_CATALOG, TEST_SCHEMA, ""), + "listImportedKeys should throw for empty table"); + } + + @Test + void testKeyBasedOpsThrowForNullSchemaWithExplicitCatalog() { DatabricksMetadataQueryClient 
metadataClient = new DatabricksMetadataQueryClient(mockClient); - // null table should return empty for listPrimaryKeys - DatabricksResultSet pkResult = - metadataClient.listPrimaryKeys(session, TEST_CATALOG, TEST_SCHEMA, null); - assertNotNull(pkResult); - assertFalse(pkResult.next(), "Expected empty result set for listPrimaryKeys with null table"); - - // null table should return empty for listImportedKeys - DatabricksResultSet ikResult = - metadataClient.listImportedKeys(session, TEST_CATALOG, TEST_SCHEMA, null); - assertNotNull(ikResult); - assertFalse(ikResult.next(), "Expected empty result set for listImportedKeys with null table"); + assertThrows( + DatabricksSQLException.class, + () -> metadataClient.listPrimaryKeys(session, "any_catalog", null, TEST_TABLE), + "listPrimaryKeys should throw for null schema with explicit catalog"); + + assertThrows( + DatabricksSQLException.class, + () -> metadataClient.listImportedKeys(session, "any_catalog", null, TEST_TABLE), + "listImportedKeys should throw for null schema with explicit catalog"); } @Test - void testKeyBasedOpsReturnEmptyForNullSchemaWithExplicitCatalog() throws SQLException { + void testKeyBasedOpsThrowForEmptySchemaWithExplicitCatalog() { DatabricksMetadataQueryClient metadataClient = new DatabricksMetadataQueryClient(mockClient); - // schema=null with explicit catalog should return empty (matching Thrift behavior) - DatabricksResultSet pkResult = - metadataClient.listPrimaryKeys(session, "any_catalog", null, TEST_TABLE); - assertNotNull(pkResult); - assertFalse( - pkResult.next(), - "Expected empty result set for listPrimaryKeys with null schema and explicit catalog"); - - DatabricksResultSet ikResult = - metadataClient.listImportedKeys(session, "any_catalog", null, TEST_TABLE); - assertNotNull(ikResult); - assertFalse( - ikResult.next(), - "Expected empty result set for listImportedKeys with null schema and explicit catalog"); + assertThrows( + DatabricksSQLException.class, + () -> 
metadataClient.listPrimaryKeys(session, "any_catalog", "", TEST_TABLE), + "listPrimaryKeys should throw for empty schema with explicit catalog"); + + assertThrows( + DatabricksSQLException.class, + () -> metadataClient.listImportedKeys(session, "any_catalog", "", TEST_TABLE), + "listImportedKeys should throw for empty schema with explicit catalog"); + } + + @Test + void testExportedKeysThrowsForNullTable() { + DatabricksMetadataQueryClient metadataClient = new DatabricksMetadataQueryClient(mockClient); + + assertThrows( + DatabricksSQLException.class, + () -> metadataClient.listExportedKeys(session, TEST_CATALOG, TEST_SCHEMA, null), + "listExportedKeys should throw for null table"); + + assertThrows( + DatabricksSQLException.class, + () -> metadataClient.listExportedKeys(session, TEST_CATALOG, TEST_SCHEMA, ""), + "listExportedKeys should throw for empty table"); } @Test diff --git a/src/test/java/com/databricks/jdbc/integration/fakeservice/tests/MetadataNullResolutionTests.java b/src/test/java/com/databricks/jdbc/integration/fakeservice/tests/MetadataNullResolutionTests.java index 05c61d11b..5d26a4117 100644 --- a/src/test/java/com/databricks/jdbc/integration/fakeservice/tests/MetadataNullResolutionTests.java +++ b/src/test/java/com/databricks/jdbc/integration/fakeservice/tests/MetadataNullResolutionTests.java @@ -127,27 +127,35 @@ void testGetPrimaryKeys_fullySpecified() throws SQLException { @Test @Order(4) - void testGetPrimaryKeys_nullTableReturnsEmpty() throws SQLException { + void testGetPrimaryKeys_nullTableThrows() throws SQLException { assertNotNull(connection); DatabaseMetaData md = connection.getMetaData(); - try (ResultSet rs = md.getPrimaryKeys(testCatalog, TEST_SCHEMA_NAME, null)) { - assertNotNull(rs); - assertFalse(rs.next(), "Should return empty result when table is null"); - } + assertThrows( + SQLException.class, + () -> md.getPrimaryKeys(testCatalog, TEST_SCHEMA_NAME, null), + "Should throw when table is null"); } @Test @Order(5) - void 
testGetPrimaryKeys_explicitCatalogNullSchemaReturnsEmpty() throws SQLException { + void testGetPrimaryKeys_explicitCatalogNullSchemaThrows() throws SQLException { assertNotNull(connection); DatabaseMetaData md = connection.getMetaData(); - // Even when catalog matches the current catalog, null schema with explicit catalog → empty - try (ResultSet rs = md.getPrimaryKeys(testCatalog, null, PARENT_TABLE)) { - assertNotNull(rs); - assertFalse( - rs.next(), - "Should return empty result when schema is null and catalog is explicitly provided"); - } + assertThrows( + SQLException.class, + () -> md.getPrimaryKeys(testCatalog, null, PARENT_TABLE), + "Should throw when schema is null and catalog is explicitly provided"); + } + + @Test + @Order(6) + void testGetPrimaryKeys_emptyTableThrows() throws SQLException { + assertNotNull(connection); + DatabaseMetaData md = connection.getMetaData(); + assertThrows( + SQLException.class, + () -> md.getPrimaryKeys(testCatalog, TEST_SCHEMA_NAME, ""), + "Should throw when table is empty string"); } // ==================== getImportedKeys ==================== @@ -192,13 +200,24 @@ void testGetImportedKeys_fullySpecified() throws SQLException { @Test @Order(12) - void testGetImportedKeys_nullTableReturnsEmpty() throws SQLException { + void testGetImportedKeys_nullTableThrows() throws SQLException { assertNotNull(connection); DatabaseMetaData md = connection.getMetaData(); - try (ResultSet rs = md.getImportedKeys(testCatalog, TEST_SCHEMA_NAME, null)) { - assertNotNull(rs); - assertFalse(rs.next(), "Should return empty result when table is null"); - } + assertThrows( + SQLException.class, + () -> md.getImportedKeys(testCatalog, TEST_SCHEMA_NAME, null), + "Should throw when table is null"); + } + + @Test + @Order(13) + void testGetExportedKeys_nullTableThrows() throws SQLException { + assertNotNull(connection); + DatabaseMetaData md = connection.getMetaData(); + assertThrows( + SQLException.class, + () -> md.getExportedKeys(testCatalog, 
TEST_SCHEMA_NAME, null), + "Should throw when table is null"); } // ==================== getCrossReference ==================== @@ -264,28 +283,61 @@ void testGetCrossReference_fullySpecified() throws SQLException { @Test @Order(23) - void testGetCrossReference_nullForeignTableReturnsEmpty() throws SQLException { + void testGetCrossReference_nullForeignTableThrows() throws SQLException { assertNotNull(connection); DatabaseMetaData md = connection.getMetaData(); - try (ResultSet rs = - md.getCrossReference( - testCatalog, TEST_SCHEMA_NAME, PARENT_TABLE, testCatalog, TEST_SCHEMA_NAME, null)) { - assertNotNull(rs); - assertFalse(rs.next(), "Should return empty result when foreign table is null"); - } + assertThrows( + SQLException.class, + () -> + md.getCrossReference( + testCatalog, TEST_SCHEMA_NAME, PARENT_TABLE, testCatalog, TEST_SCHEMA_NAME, null), + "Should throw when foreign table is null"); } @Test @Order(24) - void testGetCrossReference_nullParentTableReturnsEmpty() throws SQLException { + void testGetCrossReference_nullParentTableThrows() throws SQLException { assertNotNull(connection); DatabaseMetaData md = connection.getMetaData(); - try (ResultSet rs = - md.getCrossReference( - testCatalog, TEST_SCHEMA_NAME, null, testCatalog, TEST_SCHEMA_NAME, CHILD_TABLE)) { - assertNotNull(rs); - assertFalse(rs.next(), "Should return empty result when parent table is null"); - } + assertThrows( + SQLException.class, + () -> + md.getCrossReference( + testCatalog, TEST_SCHEMA_NAME, null, testCatalog, TEST_SCHEMA_NAME, CHILD_TABLE), + "Should throw when parent table is null"); + } + + @Test + @Order(25) + void testGetCrossReference_allEmptyThrows() throws SQLException { + assertNotNull(connection); + DatabaseMetaData md = connection.getMetaData(); + assertThrows( + SQLException.class, + () -> md.getCrossReference("", "", "", "", "", ""), + "Should throw when all parameters are empty strings"); + } + + @Test + @Order(26) + void testGetCrossReference_emptyForeignSideThrows() 
throws SQLException { + assertNotNull(connection); + DatabaseMetaData md = connection.getMetaData(); + assertThrows( + SQLException.class, + () -> md.getCrossReference(testCatalog, TEST_SCHEMA_NAME, PARENT_TABLE, "", "", ""), + "Should throw when foreign side is all empty"); + } + + @Test + @Order(27) + void testGetCrossReference_emptyParentSideThrows() throws SQLException { + assertNotNull(connection); + DatabaseMetaData md = connection.getMetaData(); + assertThrows( + SQLException.class, + () -> md.getCrossReference("", "", "", testCatalog, TEST_SCHEMA_NAME, CHILD_TABLE), + "Should throw when parent side is all empty"); } // ==================== Cleanup ==================== From c39197e2a60dd2a82ea883c47716f9483db4c571 Mon Sep 17 00:00:00 2001 From: Madhavendra Rathore Date: Wed, 8 Apr 2026 16:21:34 +0530 Subject: [PATCH 2/7] Add debug logging before throwing exceptions in metadata validation Address PR review comments: add LOGGER.debug() calls before each throw site in getCrossReference empty-string check, listExportedKeys null table check, and resolveKeyBasedParams validation. Also add comment clarifying the safety-net check for getCurrentCatalogAndSchema returning null values. 
Co-authored-by: Isaac Signed-off-by: Madhavendra Rathore --- .../jdbc/api/impl/DatabricksDatabaseMetaData.java | 2 ++ .../impl/sqlexec/DatabricksMetadataQueryClient.java | 10 ++++++++++ 2 files changed, 12 insertions(+) diff --git a/src/main/java/com/databricks/jdbc/api/impl/DatabricksDatabaseMetaData.java b/src/main/java/com/databricks/jdbc/api/impl/DatabricksDatabaseMetaData.java index 430215f2c..f2bd95839 100644 --- a/src/main/java/com/databricks/jdbc/api/impl/DatabricksDatabaseMetaData.java +++ b/src/main/java/com/databricks/jdbc/api/impl/DatabricksDatabaseMetaData.java @@ -1135,6 +1135,8 @@ public ResultSet getCrossReference( boolean parentTableMissing = parentTable == null || parentTable.isEmpty(); boolean foreignTableMissing = foreignTable == null || foreignTable.isEmpty(); if (parentTableMissing && foreignTableMissing) { + LOGGER.debug( + "getCrossReference: both parentTable and foreignTable are null or empty, throwing"); throw new DatabricksSQLException( "Invalid argument: foreignTable and parentTableName are both null or empty", DatabricksDriverErrorCode.INVALID_STATE); diff --git a/src/main/java/com/databricks/jdbc/dbclient/impl/sqlexec/DatabricksMetadataQueryClient.java b/src/main/java/com/databricks/jdbc/dbclient/impl/sqlexec/DatabricksMetadataQueryClient.java index 8c476f094..c58ddedb6 100644 --- a/src/main/java/com/databricks/jdbc/dbclient/impl/sqlexec/DatabricksMetadataQueryClient.java +++ b/src/main/java/com/databricks/jdbc/dbclient/impl/sqlexec/DatabricksMetadataQueryClient.java @@ -391,6 +391,7 @@ public DatabricksResultSet listExportedKeys( LOGGER.debug("public ResultSet listExportedKeys() using SDK"); if (table == null || table.isEmpty()) { + LOGGER.debug("listExportedKeys: table is null or empty, throwing"); throw new DatabricksSQLException( "Invalid argument: tableName may not be null or empty", DatabricksDriverErrorCode.INVALID_STATE); @@ -518,6 +519,7 @@ private String autoFillCatalog(String catalog, String currentCatalog) { private 
String[] resolveKeyBasedParams( String catalog, String schema, String table, IDatabricksSession session) throws SQLException { if (table == null || table.isEmpty()) { + LOGGER.debug("resolveKeyBasedParams: table is null or empty, throwing"); throw new DatabricksSQLException( "Invalid argument: tableName may not be null or empty", DatabricksDriverErrorCode.INVALID_STATE); @@ -530,12 +532,20 @@ private String[] resolveKeyBasedParams( schema = currentCatalogAndSchema[1]; } } else if (schema == null || schema.isEmpty()) { + LOGGER.debug( + "resolveKeyBasedParams: schema is null or empty with explicit catalog '{}', throwing", + catalog); throw new DatabricksSQLException( "Invalid argument: schema may not be null or empty when catalog is specified", DatabricksDriverErrorCode.INVALID_STATE); } + // Safety net: getCurrentCatalogAndSchema() returned null values if (catalog == null || schema == null) { + LOGGER.debug( + "resolveKeyBasedParams: could not resolve catalog or schema (catalog={}, schema={})", + catalog, + schema); throw new DatabricksSQLException( "Invalid argument: could not resolve catalog or schema", DatabricksDriverErrorCode.INVALID_STATE); From 22e416ae73bb09bca24b5a540779a4b629e38ccd Mon Sep 17 00:00:00 2001 From: Madhavendra Rathore Date: Wed, 8 Apr 2026 16:45:18 +0530 Subject: [PATCH 3/7] =?UTF-8?q?Remove=20empty=20string=20validation=20?= =?UTF-8?q?=E2=80=94=20only=20throw=20for=20null=20params?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Empty strings are passed through to the server like Thrift does, instead of being treated as invalid. Only null values trigger exceptions for table and schema parameters. 
Co-authored-by: Isaac Signed-off-by: Madhavendra Rathore --- .../api/impl/DatabricksDatabaseMetaData.java | 9 ++-- .../DatabricksMetadataQueryClient.java | 28 +++++------- .../DatabricksMetadataQueryClientTest.java | 35 --------------- .../tests/MetadataNullResolutionTests.java | 44 ------------------- 4 files changed, 15 insertions(+), 101 deletions(-) diff --git a/src/main/java/com/databricks/jdbc/api/impl/DatabricksDatabaseMetaData.java b/src/main/java/com/databricks/jdbc/api/impl/DatabricksDatabaseMetaData.java index f2bd95839..455c7253e 100644 --- a/src/main/java/com/databricks/jdbc/api/impl/DatabricksDatabaseMetaData.java +++ b/src/main/java/com/databricks/jdbc/api/impl/DatabricksDatabaseMetaData.java @@ -1132,13 +1132,10 @@ public ResultSet getCrossReference( foreignTable)); throwExceptionIfConnectionIsClosed(); - boolean parentTableMissing = parentTable == null || parentTable.isEmpty(); - boolean foreignTableMissing = foreignTable == null || foreignTable.isEmpty(); - if (parentTableMissing && foreignTableMissing) { - LOGGER.debug( - "getCrossReference: both parentTable and foreignTable are null or empty, throwing"); + if (parentTable == null && foreignTable == null) { + LOGGER.debug("getCrossReference: both parentTable and foreignTable are null, throwing"); throw new DatabricksSQLException( - "Invalid argument: foreignTable and parentTableName are both null or empty", + "Invalid argument: foreignTable and parentTableName are both null", DatabricksDriverErrorCode.INVALID_STATE); } diff --git a/src/main/java/com/databricks/jdbc/dbclient/impl/sqlexec/DatabricksMetadataQueryClient.java b/src/main/java/com/databricks/jdbc/dbclient/impl/sqlexec/DatabricksMetadataQueryClient.java index c58ddedb6..860dc27b2 100644 --- a/src/main/java/com/databricks/jdbc/dbclient/impl/sqlexec/DatabricksMetadataQueryClient.java +++ b/src/main/java/com/databricks/jdbc/dbclient/impl/sqlexec/DatabricksMetadataQueryClient.java @@ -390,11 +390,10 @@ public DatabricksResultSet 
listExportedKeys( IDatabricksSession session, String catalog, String schema, String table) throws SQLException { LOGGER.debug("public ResultSet listExportedKeys() using SDK"); - if (table == null || table.isEmpty()) { - LOGGER.debug("listExportedKeys: table is null or empty, throwing"); + if (table == null) { + LOGGER.debug("listExportedKeys: table is null, throwing"); throw new DatabricksSQLException( - "Invalid argument: tableName may not be null or empty", - DatabricksDriverErrorCode.INVALID_STATE); + "Invalid argument: tableName may not be null", DatabricksDriverErrorCode.INVALID_STATE); } // Only fetch currentCatalog if multiple catalog support is disabled @@ -508,21 +507,19 @@ private String autoFillCatalog(String catalog, String currentCatalog) { } /** - * Validates and resolves null/empty catalog/schema/table for key-based metadata operations to - * match Thrift server behavior. Throws DatabricksSQLException for invalid parameter combinations + * Validates and resolves null catalog/schema/table for key-based metadata operations to match + * Thrift server behavior. Throws DatabricksSQLException for invalid parameter combinations * (matching Thrift error behavior). When catalog is null, it is replaced with current_catalog and * (if schema is also null) schema is replaced with current_schema. 
* - * @throws DatabricksSQLException if table is null/empty, or schema is null/empty with an explicit - * catalog + * @throws DatabricksSQLException if table is null, or schema is null with an explicit catalog */ private String[] resolveKeyBasedParams( String catalog, String schema, String table, IDatabricksSession session) throws SQLException { - if (table == null || table.isEmpty()) { - LOGGER.debug("resolveKeyBasedParams: table is null or empty, throwing"); + if (table == null) { + LOGGER.debug("resolveKeyBasedParams: table is null, throwing"); throw new DatabricksSQLException( - "Invalid argument: tableName may not be null or empty", - DatabricksDriverErrorCode.INVALID_STATE); + "Invalid argument: tableName may not be null", DatabricksDriverErrorCode.INVALID_STATE); } if (catalog == null) { @@ -531,12 +528,11 @@ private String[] resolveKeyBasedParams( if (schema == null) { schema = currentCatalogAndSchema[1]; } - } else if (schema == null || schema.isEmpty()) { + } else if (schema == null) { LOGGER.debug( - "resolveKeyBasedParams: schema is null or empty with explicit catalog '{}', throwing", - catalog); + "resolveKeyBasedParams: schema is null with explicit catalog '{}', throwing", catalog); throw new DatabricksSQLException( - "Invalid argument: schema may not be null or empty when catalog is specified", + "Invalid argument: schema may not be null when catalog is specified", DatabricksDriverErrorCode.INVALID_STATE); } diff --git a/src/test/java/com/databricks/jdbc/dbclient/impl/sqlexec/DatabricksMetadataQueryClientTest.java b/src/test/java/com/databricks/jdbc/dbclient/impl/sqlexec/DatabricksMetadataQueryClientTest.java index 9080fafd8..e788f97f6 100644 --- a/src/test/java/com/databricks/jdbc/dbclient/impl/sqlexec/DatabricksMetadataQueryClientTest.java +++ b/src/test/java/com/databricks/jdbc/dbclient/impl/sqlexec/DatabricksMetadataQueryClientTest.java @@ -959,21 +959,6 @@ void testKeyBasedOpsThrowForNullTable() { "listImportedKeys should throw for null table"); 
} - @Test - void testKeyBasedOpsThrowForEmptyTable() { - DatabricksMetadataQueryClient metadataClient = new DatabricksMetadataQueryClient(mockClient); - - assertThrows( - DatabricksSQLException.class, - () -> metadataClient.listPrimaryKeys(session, TEST_CATALOG, TEST_SCHEMA, ""), - "listPrimaryKeys should throw for empty table"); - - assertThrows( - DatabricksSQLException.class, - () -> metadataClient.listImportedKeys(session, TEST_CATALOG, TEST_SCHEMA, ""), - "listImportedKeys should throw for empty table"); - } - @Test void testKeyBasedOpsThrowForNullSchemaWithExplicitCatalog() { DatabricksMetadataQueryClient metadataClient = new DatabricksMetadataQueryClient(mockClient); @@ -989,21 +974,6 @@ void testKeyBasedOpsThrowForNullSchemaWithExplicitCatalog() { "listImportedKeys should throw for null schema with explicit catalog"); } - @Test - void testKeyBasedOpsThrowForEmptySchemaWithExplicitCatalog() { - DatabricksMetadataQueryClient metadataClient = new DatabricksMetadataQueryClient(mockClient); - - assertThrows( - DatabricksSQLException.class, - () -> metadataClient.listPrimaryKeys(session, "any_catalog", "", TEST_TABLE), - "listPrimaryKeys should throw for empty schema with explicit catalog"); - - assertThrows( - DatabricksSQLException.class, - () -> metadataClient.listImportedKeys(session, "any_catalog", "", TEST_TABLE), - "listImportedKeys should throw for empty schema with explicit catalog"); - } - @Test void testExportedKeysThrowsForNullTable() { DatabricksMetadataQueryClient metadataClient = new DatabricksMetadataQueryClient(mockClient); @@ -1012,11 +982,6 @@ void testExportedKeysThrowsForNullTable() { DatabricksSQLException.class, () -> metadataClient.listExportedKeys(session, TEST_CATALOG, TEST_SCHEMA, null), "listExportedKeys should throw for null table"); - - assertThrows( - DatabricksSQLException.class, - () -> metadataClient.listExportedKeys(session, TEST_CATALOG, TEST_SCHEMA, ""), - "listExportedKeys should throw for empty table"); } @Test diff --git 
a/src/test/java/com/databricks/jdbc/integration/fakeservice/tests/MetadataNullResolutionTests.java b/src/test/java/com/databricks/jdbc/integration/fakeservice/tests/MetadataNullResolutionTests.java index 5d26a4117..5fdf2c29a 100644 --- a/src/test/java/com/databricks/jdbc/integration/fakeservice/tests/MetadataNullResolutionTests.java +++ b/src/test/java/com/databricks/jdbc/integration/fakeservice/tests/MetadataNullResolutionTests.java @@ -147,17 +147,6 @@ void testGetPrimaryKeys_explicitCatalogNullSchemaThrows() throws SQLException { "Should throw when schema is null and catalog is explicitly provided"); } - @Test - @Order(6) - void testGetPrimaryKeys_emptyTableThrows() throws SQLException { - assertNotNull(connection); - DatabaseMetaData md = connection.getMetaData(); - assertThrows( - SQLException.class, - () -> md.getPrimaryKeys(testCatalog, TEST_SCHEMA_NAME, ""), - "Should throw when table is empty string"); - } - // ==================== getImportedKeys ==================== @Test @@ -307,39 +296,6 @@ void testGetCrossReference_nullParentTableThrows() throws SQLException { "Should throw when parent table is null"); } - @Test - @Order(25) - void testGetCrossReference_allEmptyThrows() throws SQLException { - assertNotNull(connection); - DatabaseMetaData md = connection.getMetaData(); - assertThrows( - SQLException.class, - () -> md.getCrossReference("", "", "", "", "", ""), - "Should throw when all parameters are empty strings"); - } - - @Test - @Order(26) - void testGetCrossReference_emptyForeignSideThrows() throws SQLException { - assertNotNull(connection); - DatabaseMetaData md = connection.getMetaData(); - assertThrows( - SQLException.class, - () -> md.getCrossReference(testCatalog, TEST_SCHEMA_NAME, PARENT_TABLE, "", "", ""), - "Should throw when foreign side is all empty"); - } - - @Test - @Order(27) - void testGetCrossReference_emptyParentSideThrows() throws SQLException { - assertNotNull(connection); - DatabaseMetaData md = connection.getMetaData(); - 
assertThrows( - SQLException.class, - () -> md.getCrossReference("", "", "", testCatalog, TEST_SCHEMA_NAME, CHILD_TABLE), - "Should throw when parent side is all empty"); - } - // ==================== Cleanup ==================== @Test From 94c6e15c9b73bffbe1832eb18afe19cc85cb80c0 Mon Sep 17 00:00:00 2001 From: Madhavendra Rathore Date: Wed, 8 Apr 2026 23:23:25 +0530 Subject: [PATCH 4/7] getCrossReference: null table returns empty, empty string table throws Match Thrift behavior for getCrossReference: - null table on either side = "unspecified", return empty ResultSet - empty string table = invalid, throw SQLException (Thrift server rejects these) Co-authored-by: Isaac Signed-off-by: Madhavendra Rathore --- .../api/impl/DatabricksDatabaseMetaData.java | 8 +++ .../DatabricksMetadataQueryClient.java | 11 +++- .../DatabricksMetadataQueryClientTest.java | 31 +++++++---- .../tests/MetadataNullResolutionTests.java | 51 ++++++++++++++++--- 4 files changed, 82 insertions(+), 19 deletions(-) diff --git a/src/main/java/com/databricks/jdbc/api/impl/DatabricksDatabaseMetaData.java b/src/main/java/com/databricks/jdbc/api/impl/DatabricksDatabaseMetaData.java index 455c7253e..9ae4260ff 100644 --- a/src/main/java/com/databricks/jdbc/api/impl/DatabricksDatabaseMetaData.java +++ b/src/main/java/com/databricks/jdbc/api/impl/DatabricksDatabaseMetaData.java @@ -1138,6 +1138,14 @@ public ResultSet getCrossReference( "Invalid argument: foreignTable and parentTableName are both null", DatabricksDriverErrorCode.INVALID_STATE); } + // Empty string table names are invalid — Thrift server rejects them, so we do too + boolean parentTableEmpty = parentTable != null && parentTable.isEmpty(); + boolean foreignTableEmpty = foreignTable != null && foreignTable.isEmpty(); + if (parentTableEmpty || foreignTableEmpty) { + LOGGER.debug("getCrossReference: parentTable or foreignTable is empty string, throwing"); + throw new DatabricksSQLException( + "Invalid argument: table name may not be empty", 
DatabricksDriverErrorCode.INVALID_STATE); + } return session .getDatabricksMetadataClient() diff --git a/src/main/java/com/databricks/jdbc/dbclient/impl/sqlexec/DatabricksMetadataQueryClient.java b/src/main/java/com/databricks/jdbc/dbclient/impl/sqlexec/DatabricksMetadataQueryClient.java index 860dc27b2..e1e8a9e2a 100644 --- a/src/main/java/com/databricks/jdbc/dbclient/impl/sqlexec/DatabricksMetadataQueryClient.java +++ b/src/main/java/com/databricks/jdbc/dbclient/impl/sqlexec/DatabricksMetadataQueryClient.java @@ -420,6 +420,13 @@ public DatabricksResultSet listCrossReferences( throws SQLException { LOGGER.debug("public ResultSet listCrossReferences() using SDK"); + // Null table on either side means "unspecified" — Thrift server returns empty ResultSet + if (foreignTable == null || parentTable == null) { + LOGGER.debug( + "listCrossReferences: foreignTable or parentTable is null, returning empty result set"); + return metadataResultSetBuilder.getCrossRefsResult(new ArrayList<>()); + } + // Only fetch currentCatalog if multiple catalog support is disabled String currentCatalog = isMultipleCatalogSupportDisabled() ? 
session.getCurrentCatalog() : null; if (!metadataResultSetBuilder.shouldAllowCatalogAccess(parentCatalog, currentCatalog, session) @@ -428,10 +435,10 @@ public DatabricksResultSet listCrossReferences( return metadataResultSetBuilder.getCrossRefsResult(new ArrayList<>()); } - // Resolve null/empty params for the foreign side (used to build the SQL query) + // Resolve null catalog/schema for the foreign side (used to build the SQL query) String[] resolvedForeignParams = resolveKeyBasedParams(foreignCatalog, foreignSchema, foreignTable, session); - // Resolve null/empty params for the parent side (used for filtering results) + // Resolve null catalog/schema for the parent side (used for filtering results) String[] resolvedParentParams = resolveKeyBasedParams(parentCatalog, parentSchema, parentTable, session); diff --git a/src/test/java/com/databricks/jdbc/dbclient/impl/sqlexec/DatabricksMetadataQueryClientTest.java b/src/test/java/com/databricks/jdbc/dbclient/impl/sqlexec/DatabricksMetadataQueryClientTest.java index e788f97f6..7ed0a9428 100644 --- a/src/test/java/com/databricks/jdbc/dbclient/impl/sqlexec/DatabricksMetadataQueryClientTest.java +++ b/src/test/java/com/databricks/jdbc/dbclient/impl/sqlexec/DatabricksMetadataQueryClientTest.java @@ -759,20 +759,31 @@ void testListCrossReferences() throws Exception { } /** - * Tests that getCrossReference returns empty result set (not an exception) when all three - * foreign-side parameters are null. Matches Thrift server behavior where null foreign table - * delegates to getExportedKeys which returns empty in DBSQL. + * Tests that getCrossReference returns empty result set (not an exception) when foreign table is + * null. Matches Thrift server behavior where null table means "unspecified" and returns empty. 
*/ @Test - void testListCrossReferences_allForeignParamsNull_throws() { + void testListCrossReferences_allForeignParamsNull_returnsEmpty() throws Exception { DatabricksMetadataQueryClient metadataClient = new DatabricksMetadataQueryClient(mockClient); - assertThrows( - DatabricksSQLException.class, - () -> - metadataClient.listCrossReferences( - session, TEST_CATALOG, TEST_SCHEMA, TEST_TABLE, null, null, null), - "Should throw when foreign table is null"); + DatabricksResultSet result = + metadataClient.listCrossReferences( + session, TEST_CATALOG, TEST_SCHEMA, TEST_TABLE, null, null, null); + assertFalse(result.next(), "Should return empty when foreign table is null"); + } + + /** + * Tests that getCrossReference returns empty result set when parent table is null. Matches Thrift + * server behavior. + */ + @Test + void testListCrossReferences_parentTableNull_returnsEmpty() throws Exception { + DatabricksMetadataQueryClient metadataClient = new DatabricksMetadataQueryClient(mockClient); + + DatabricksResultSet result = + metadataClient.listCrossReferences( + session, null, null, null, TEST_CATALOG, TEST_SCHEMA, TEST_TABLE); + assertFalse(result.next(), "Should return empty when parent table is null"); } @Test diff --git a/src/test/java/com/databricks/jdbc/integration/fakeservice/tests/MetadataNullResolutionTests.java b/src/test/java/com/databricks/jdbc/integration/fakeservice/tests/MetadataNullResolutionTests.java index 5fdf2c29a..6bf86f9c2 100644 --- a/src/test/java/com/databricks/jdbc/integration/fakeservice/tests/MetadataNullResolutionTests.java +++ b/src/test/java/com/databricks/jdbc/integration/fakeservice/tests/MetadataNullResolutionTests.java @@ -272,28 +272,65 @@ void testGetCrossReference_fullySpecified() throws SQLException { @Test @Order(23) - void testGetCrossReference_nullForeignTableThrows() throws SQLException { + void testGetCrossReference_nullForeignTableReturnsEmpty() throws SQLException { + assertNotNull(connection); + DatabaseMetaData md = 
connection.getMetaData(); + try (ResultSet rs = + md.getCrossReference( + testCatalog, TEST_SCHEMA_NAME, PARENT_TABLE, testCatalog, TEST_SCHEMA_NAME, null)) { + assertNotNull(rs); + assertFalse(rs.next(), "Should return empty when foreign table is null"); + } + } + + @Test + @Order(24) + void testGetCrossReference_nullParentTableReturnsEmpty() throws SQLException { + assertNotNull(connection); + DatabaseMetaData md = connection.getMetaData(); + try (ResultSet rs = + md.getCrossReference( + testCatalog, TEST_SCHEMA_NAME, null, testCatalog, TEST_SCHEMA_NAME, CHILD_TABLE)) { + assertNotNull(rs); + assertFalse(rs.next(), "Should return empty when parent table is null"); + } + } + + @Test + @Order(25) + void testGetCrossReference_emptyForeignTableThrows() throws SQLException { assertNotNull(connection); DatabaseMetaData md = connection.getMetaData(); assertThrows( SQLException.class, () -> md.getCrossReference( - testCatalog, TEST_SCHEMA_NAME, PARENT_TABLE, testCatalog, TEST_SCHEMA_NAME, null), - "Should throw when foreign table is null"); + testCatalog, TEST_SCHEMA_NAME, PARENT_TABLE, testCatalog, TEST_SCHEMA_NAME, ""), + "Should throw when foreign table is empty string"); } @Test - @Order(24) - void testGetCrossReference_nullParentTableThrows() throws SQLException { + @Order(26) + void testGetCrossReference_emptyParentTableThrows() throws SQLException { assertNotNull(connection); DatabaseMetaData md = connection.getMetaData(); assertThrows( SQLException.class, () -> md.getCrossReference( - testCatalog, TEST_SCHEMA_NAME, null, testCatalog, TEST_SCHEMA_NAME, CHILD_TABLE), - "Should throw when parent table is null"); + testCatalog, TEST_SCHEMA_NAME, "", testCatalog, TEST_SCHEMA_NAME, CHILD_TABLE), + "Should throw when parent table is empty string"); + } + + @Test + @Order(27) + void testGetCrossReference_allEmptyThrows() throws SQLException { + assertNotNull(connection); + DatabaseMetaData md = connection.getMetaData(); + assertThrows( + SQLException.class, + () -> 
md.getCrossReference("", "", "", "", "", ""), + "Should throw when all parameters are empty strings"); } // ==================== Cleanup ==================== From 7cf3826c7f24e912431bcc8e97f68ad933b140cf Mon Sep 17 00:00:00 2001 From: Madhavendra Rathore Date: Thu, 9 Apr 2026 00:19:02 +0530 Subject: [PATCH 5/7] Refine throw/empty parity: empty table throws, null foreignTable returns empty Based on comparator results, align all key-based metadata ops with Thrift: - getPrimaryKeys/getImportedKeys/getExportedKeys: throw for null OR empty table - getCrossReference: throw for null/empty parentTable, throw for empty foreignTable, return empty ResultSet for null foreignTable (Thrift treats as "unspecified") Co-authored-by: Isaac Signed-off-by: Madhavendra Rathore --- .../api/impl/DatabricksDatabaseMetaData.java | 18 +++++----- .../DatabricksMetadataQueryClient.java | 24 +++++++------- .../DatabricksMetadataQueryClientTest.java | 33 +++++++++++++++---- .../tests/MetadataNullResolutionTests.java | 25 ++++++++++---- 4 files changed, 67 insertions(+), 33 deletions(-) diff --git a/src/main/java/com/databricks/jdbc/api/impl/DatabricksDatabaseMetaData.java b/src/main/java/com/databricks/jdbc/api/impl/DatabricksDatabaseMetaData.java index 9ae4260ff..3064184d6 100644 --- a/src/main/java/com/databricks/jdbc/api/impl/DatabricksDatabaseMetaData.java +++ b/src/main/java/com/databricks/jdbc/api/impl/DatabricksDatabaseMetaData.java @@ -1132,19 +1132,19 @@ public ResultSet getCrossReference( foreignTable)); throwExceptionIfConnectionIsClosed(); - if (parentTable == null && foreignTable == null) { - LOGGER.debug("getCrossReference: both parentTable and foreignTable are null, throwing"); + // Thrift requires parentTable — null or empty parentTable is invalid + if (parentTable == null || parentTable.isEmpty()) { + LOGGER.debug("getCrossReference: parentTable is null or empty, throwing"); throw new DatabricksSQLException( - "Invalid argument: foreignTable and parentTableName are both null", 
+ "Invalid argument: parentTable may not be null or empty", DatabricksDriverErrorCode.INVALID_STATE); } - // Empty string table names are invalid — Thrift server rejects them, so we do too - boolean parentTableEmpty = parentTable != null && parentTable.isEmpty(); - boolean foreignTableEmpty = foreignTable != null && foreignTable.isEmpty(); - if (parentTableEmpty || foreignTableEmpty) { - LOGGER.debug("getCrossReference: parentTable or foreignTable is empty string, throwing"); + // Empty foreign table is also invalid — Thrift server rejects it + if (foreignTable != null && foreignTable.isEmpty()) { + LOGGER.debug("getCrossReference: foreignTable is empty string, throwing"); throw new DatabricksSQLException( - "Invalid argument: table name may not be empty", DatabricksDriverErrorCode.INVALID_STATE); + "Invalid argument: foreignTable may not be empty", + DatabricksDriverErrorCode.INVALID_STATE); } return session diff --git a/src/main/java/com/databricks/jdbc/dbclient/impl/sqlexec/DatabricksMetadataQueryClient.java b/src/main/java/com/databricks/jdbc/dbclient/impl/sqlexec/DatabricksMetadataQueryClient.java index e1e8a9e2a..0501ff057 100644 --- a/src/main/java/com/databricks/jdbc/dbclient/impl/sqlexec/DatabricksMetadataQueryClient.java +++ b/src/main/java/com/databricks/jdbc/dbclient/impl/sqlexec/DatabricksMetadataQueryClient.java @@ -390,10 +390,11 @@ public DatabricksResultSet listExportedKeys( IDatabricksSession session, String catalog, String schema, String table) throws SQLException { LOGGER.debug("public ResultSet listExportedKeys() using SDK"); - if (table == null) { - LOGGER.debug("listExportedKeys: table is null, throwing"); + if (table == null || table.isEmpty()) { + LOGGER.debug("listExportedKeys: table is null or empty, throwing"); throw new DatabricksSQLException( - "Invalid argument: tableName may not be null", DatabricksDriverErrorCode.INVALID_STATE); + "Invalid argument: tableName may not be null or empty", + DatabricksDriverErrorCode.INVALID_STATE); } 
// Only fetch currentCatalog if multiple catalog support is disabled @@ -420,10 +421,9 @@ public DatabricksResultSet listCrossReferences( throws SQLException { LOGGER.debug("public ResultSet listCrossReferences() using SDK"); - // Null table on either side means "unspecified" — Thrift server returns empty ResultSet - if (foreignTable == null || parentTable == null) { - LOGGER.debug( - "listCrossReferences: foreignTable or parentTable is null, returning empty result set"); + // Null foreignTable means "unspecified" — Thrift server returns empty ResultSet + if (foreignTable == null) { + LOGGER.debug("listCrossReferences: foreignTable is null, returning empty result set"); return metadataResultSetBuilder.getCrossRefsResult(new ArrayList<>()); } @@ -519,14 +519,16 @@ private String autoFillCatalog(String catalog, String currentCatalog) { * (matching Thrift error behavior). When catalog is null, it is replaced with current_catalog and * (if schema is also null) schema is replaced with current_schema. 
 * - * @throws DatabricksSQLException if table is null, or schema is null with an explicit catalog + * @throws DatabricksSQLException if table is null/empty, or schema is null with an explicit + * catalog */ private String[] resolveKeyBasedParams( String catalog, String schema, String table, IDatabricksSession session) throws SQLException { - if (table == null) { - LOGGER.debug("resolveKeyBasedParams: table is null, throwing"); + if (table == null || table.isEmpty()) { + LOGGER.debug("resolveKeyBasedParams: table is null or empty, throwing"); throw new DatabricksSQLException( - "Invalid argument: tableName may not be null", DatabricksDriverErrorCode.INVALID_STATE); + "Invalid argument: tableName may not be null or empty", + DatabricksDriverErrorCode.INVALID_STATE); } if (catalog == null) { diff --git a/src/test/java/com/databricks/jdbc/dbclient/impl/sqlexec/DatabricksMetadataQueryClientTest.java b/src/test/java/com/databricks/jdbc/dbclient/impl/sqlexec/DatabricksMetadataQueryClientTest.java index 7ed0a9428..60aaca3d1 100644 --- a/src/test/java/com/databricks/jdbc/dbclient/impl/sqlexec/DatabricksMetadataQueryClientTest.java +++ b/src/test/java/com/databricks/jdbc/dbclient/impl/sqlexec/DatabricksMetadataQueryClientTest.java @@ -773,17 +773,18 @@ void testListCrossReferences_allForeignParamsNull_returnsEmpty() throws Exceptio } /** - * Tests that getCrossReference returns empty result set when parent table is null. Matches Thrift - * server behavior. + * Tests that getCrossReference returns an empty result set when both tables are null. Thrift + * server requires parentTable, but that null check is at the DatabricksDatabaseMetaData layer. + * At this layer, null foreignTable triggers the early return of an empty result set before + * parentTable is examined. 
*/ @Test - void testListCrossReferences_parentTableNull_returnsEmpty() throws Exception { + void testListCrossReferences_bothTablesNull_returnsEmpty() throws Exception { DatabricksMetadataQueryClient metadataClient = new DatabricksMetadataQueryClient(mockClient); DatabricksResultSet result = - metadataClient.listCrossReferences( - session, null, null, null, TEST_CATALOG, TEST_SCHEMA, TEST_TABLE); - assertFalse(result.next(), "Should return empty when parent table is null"); + metadataClient.listCrossReferences(session, null, null, null, null, null, null); + assertFalse(result.next(), "Should return empty when both tables are null"); } @Test @@ -970,6 +971,21 @@ void testKeyBasedOpsThrowForNullTable() { "listImportedKeys should throw for null table"); } + @Test + void testKeyBasedOpsThrowForEmptyTable() { + DatabricksMetadataQueryClient metadataClient = new DatabricksMetadataQueryClient(mockClient); + + assertThrows( + DatabricksSQLException.class, + () -> metadataClient.listPrimaryKeys(session, TEST_CATALOG, TEST_SCHEMA, ""), + "listPrimaryKeys should throw for empty table"); + + assertThrows( + DatabricksSQLException.class, + () -> metadataClient.listImportedKeys(session, TEST_CATALOG, TEST_SCHEMA, ""), + "listImportedKeys should throw for empty table"); + } + @Test void testKeyBasedOpsThrowForNullSchemaWithExplicitCatalog() { DatabricksMetadataQueryClient metadataClient = new DatabricksMetadataQueryClient(mockClient); @@ -993,6 +1009,11 @@ void testExportedKeysThrowsForNullTable() { DatabricksSQLException.class, () -> metadataClient.listExportedKeys(session, TEST_CATALOG, TEST_SCHEMA, null), "listExportedKeys should throw for null table"); + + assertThrows( + DatabricksSQLException.class, + () -> metadataClient.listExportedKeys(session, TEST_CATALOG, TEST_SCHEMA, ""), + "listExportedKeys should throw for empty table"); } @Test diff --git a/src/test/java/com/databricks/jdbc/integration/fakeservice/tests/MetadataNullResolutionTests.java 
b/src/test/java/com/databricks/jdbc/integration/fakeservice/tests/MetadataNullResolutionTests.java index 6bf86f9c2..51082f0a0 100644 --- a/src/test/java/com/databricks/jdbc/integration/fakeservice/tests/MetadataNullResolutionTests.java +++ b/src/test/java/com/databricks/jdbc/integration/fakeservice/tests/MetadataNullResolutionTests.java @@ -147,6 +147,17 @@ void testGetPrimaryKeys_explicitCatalogNullSchemaThrows() throws SQLException { "Should throw when schema is null and catalog is explicitly provided"); } + @Test + @Order(6) + void testGetPrimaryKeys_emptyTableThrows() throws SQLException { + assertNotNull(connection); + DatabaseMetaData md = connection.getMetaData(); + assertThrows( + SQLException.class, + () -> md.getPrimaryKeys(testCatalog, TEST_SCHEMA_NAME, ""), + "Should throw when table is empty string"); + } + // ==================== getImportedKeys ==================== @Test @@ -285,15 +296,15 @@ void testGetCrossReference_nullForeignTableReturnsEmpty() throws SQLException { @Test @Order(24) - void testGetCrossReference_nullParentTableReturnsEmpty() throws SQLException { + void testGetCrossReference_nullParentTableThrows() throws SQLException { assertNotNull(connection); DatabaseMetaData md = connection.getMetaData(); - try (ResultSet rs = - md.getCrossReference( - testCatalog, TEST_SCHEMA_NAME, null, testCatalog, TEST_SCHEMA_NAME, CHILD_TABLE)) { - assertNotNull(rs); - assertFalse(rs.next(), "Should return empty when parent table is null"); - } + assertThrows( + SQLException.class, + () -> + md.getCrossReference( + testCatalog, TEST_SCHEMA_NAME, null, testCatalog, TEST_SCHEMA_NAME, CHILD_TABLE), + "Should throw when parent table is null"); } @Test From 4736e834227d344945a7a5a507589b74cb7dc282 Mon Sep 17 00:00:00 2001 From: Madhavendra Rathore Date: Thu, 9 Apr 2026 00:21:38 +0530 Subject: [PATCH 6/7] getExportedKeys: only throw for null table, not empty Thrift's getExportedKeys returns empty ResultSet for empty table name (it always returns empty in 
DBSQL). Match this behavior by only throwing for null, not empty string. Co-authored-by: Isaac Signed-off-by: Madhavendra Rathore --- .../impl/sqlexec/DatabricksMetadataQueryClient.java | 7 +++---- .../impl/sqlexec/DatabricksMetadataQueryClientTest.java | 5 ----- 2 files changed, 3 insertions(+), 9 deletions(-) diff --git a/src/main/java/com/databricks/jdbc/dbclient/impl/sqlexec/DatabricksMetadataQueryClient.java b/src/main/java/com/databricks/jdbc/dbclient/impl/sqlexec/DatabricksMetadataQueryClient.java index 0501ff057..a4ee7f889 100644 --- a/src/main/java/com/databricks/jdbc/dbclient/impl/sqlexec/DatabricksMetadataQueryClient.java +++ b/src/main/java/com/databricks/jdbc/dbclient/impl/sqlexec/DatabricksMetadataQueryClient.java @@ -390,11 +390,10 @@ public DatabricksResultSet listExportedKeys( IDatabricksSession session, String catalog, String schema, String table) throws SQLException { LOGGER.debug("public ResultSet listExportedKeys() using SDK"); - if (table == null || table.isEmpty()) { - LOGGER.debug("listExportedKeys: table is null or empty, throwing"); + if (table == null) { + LOGGER.debug("listExportedKeys: table is null, throwing"); throw new DatabricksSQLException( - "Invalid argument: tableName may not be null or empty", - DatabricksDriverErrorCode.INVALID_STATE); + "Invalid argument: tableName may not be null", DatabricksDriverErrorCode.INVALID_STATE); } // Only fetch currentCatalog if multiple catalog support is disabled diff --git a/src/test/java/com/databricks/jdbc/dbclient/impl/sqlexec/DatabricksMetadataQueryClientTest.java b/src/test/java/com/databricks/jdbc/dbclient/impl/sqlexec/DatabricksMetadataQueryClientTest.java index 60aaca3d1..c36408a2a 100644 --- a/src/test/java/com/databricks/jdbc/dbclient/impl/sqlexec/DatabricksMetadataQueryClientTest.java +++ b/src/test/java/com/databricks/jdbc/dbclient/impl/sqlexec/DatabricksMetadataQueryClientTest.java @@ -1009,11 +1009,6 @@ void testExportedKeysThrowsForNullTable() { DatabricksSQLException.class, () 
-> metadataClient.listExportedKeys(session, TEST_CATALOG, TEST_SCHEMA, null), "listExportedKeys should throw for null table"); - - assertThrows( - DatabricksSQLException.class, - () -> metadataClient.listExportedKeys(session, TEST_CATALOG, TEST_SCHEMA, ""), - "listExportedKeys should throw for empty table"); } @Test From eb0a33cbe1ebe03263d3dcc9f6f0592aa4b7718d Mon Sep 17 00:00:00 2001 From: Madhavendra Rathore Date: Thu, 9 Apr 2026 01:14:44 +0530 Subject: [PATCH 7/7] Remove stale WireMock stubs for renamed tests MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Delete stub directories for tests that were renamed from *ReturnsEmpty to *Throws — these tests now throw before making any server call, so stubs are not needed. Co-authored-by: Isaac Signed-off-by: Madhavendra Rathore --- ...-6be71754-9abd-4237-8102-c7741cf9b843.json | 38 ------------------- ...-c0e71802-84ee-4dd7-8d24-960e141ced7c.json | 32 ---------------- ...-2a66b8b2-d6c3-4d3f-96c1-2f1423e22f7f.json | 33 ---------------- ...-d9dac8f1-3e37-4114-afee-263cb645c84b.json | 38 ------------------- ...-9e677901-7b79-4024-9bcd-046d454257d6.json | 32 ---------------- ...-b5c5ad7e-3125-485f-8681-ca5c96f75b82.json | 33 ---------------- ...-9bd52ee2-dd6e-457c-afc6-b1f09aa16415.json | 38 ------------------- ...-576ed39a-dbec-47ec-b115-b0824622b8c2.json | 32 ---------------- ...-0074dd8f-377f-4638-8f8c-2b1caa6197ba.json | 33 ---------------- ...-c648ba7b-ad27-45a7-8460-79196838f5bc.json | 38 ------------------- ...-f15603aa-870c-4520-910b-9e544c3bdbd5.json | 32 ---------------- ...-031f9c97-c01c-43cd-afb9-0bc2f5ca729b.json | 33 ---------------- 12 files changed, 412 deletions(-) delete mode 100644 src/test/resources/sqlexecapi/metadatanullresolutiontests/testgetcrossreference_nullparenttablereturnsempty/mappings/api_2.0_sql_sessions-6be71754-9abd-4237-8102-c7741cf9b843.json delete mode 100644 
src/test/resources/sqlexecapi/metadatanullresolutiontests/testgetcrossreference_nullparenttablereturnsempty/mappings/api_2.0_sql_sessions_01f13244-8ece-1961-a7f5-afc8f666dc7c-c0e71802-84ee-4dd7-8d24-960e141ced7c.json delete mode 100644 src/test/resources/sqlexecapi/metadatanullresolutiontests/testgetcrossreference_nullparenttablereturnsempty/mappings/oidc_.well-known_oauth-authorization-server-2a66b8b2-d6c3-4d3f-96c1-2f1423e22f7f.json delete mode 100644 src/test/resources/sqlexecapi/metadatanullresolutiontests/testgetimportedkeys_nulltablereturnsempty/mappings/api_2.0_sql_sessions-d9dac8f1-3e37-4114-afee-263cb645c84b.json delete mode 100644 src/test/resources/sqlexecapi/metadatanullresolutiontests/testgetimportedkeys_nulltablereturnsempty/mappings/api_2.0_sql_sessions_01f13244-7551-1c1e-a667-ba29671270b0-9e677901-7b79-4024-9bcd-046d454257d6.json delete mode 100644 src/test/resources/sqlexecapi/metadatanullresolutiontests/testgetimportedkeys_nulltablereturnsempty/mappings/oidc_.well-known_oauth-authorization-server-b5c5ad7e-3125-485f-8681-ca5c96f75b82.json delete mode 100644 src/test/resources/sqlexecapi/metadatanullresolutiontests/testgetprimarykeys_explicitcatalognullschemareturnsempty/mappings/api_2.0_sql_sessions-9bd52ee2-dd6e-457c-afc6-b1f09aa16415.json delete mode 100644 src/test/resources/sqlexecapi/metadatanullresolutiontests/testgetprimarykeys_explicitcatalognullschemareturnsempty/mappings/api_2.0_sql_sessions_01f13244-688e-173e-bfa9-59c2a0b7c7a5-576ed39a-dbec-47ec-b115-b0824622b8c2.json delete mode 100644 src/test/resources/sqlexecapi/metadatanullresolutiontests/testgetprimarykeys_explicitcatalognullschemareturnsempty/mappings/oidc_.well-known_oauth-authorization-server-0074dd8f-377f-4638-8f8c-2b1caa6197ba.json delete mode 100644 src/test/resources/sqlexecapi/metadatanullresolutiontests/testgetprimarykeys_nulltablereturnsempty/mappings/api_2.0_sql_sessions-c648ba7b-ad27-45a7-8460-79196838f5bc.json delete mode 100644 
src/test/resources/sqlexecapi/metadatanullresolutiontests/testgetprimarykeys_nulltablereturnsempty/mappings/api_2.0_sql_sessions_01f13244-669b-1591-9c90-d6040723b4e3-f15603aa-870c-4520-910b-9e544c3bdbd5.json delete mode 100644 src/test/resources/sqlexecapi/metadatanullresolutiontests/testgetprimarykeys_nulltablereturnsempty/mappings/oidc_.well-known_oauth-authorization-server-031f9c97-c01c-43cd-afb9-0bc2f5ca729b.json diff --git a/src/test/resources/sqlexecapi/metadatanullresolutiontests/testgetcrossreference_nullparenttablereturnsempty/mappings/api_2.0_sql_sessions-6be71754-9abd-4237-8102-c7741cf9b843.json b/src/test/resources/sqlexecapi/metadatanullresolutiontests/testgetcrossreference_nullparenttablereturnsempty/mappings/api_2.0_sql_sessions-6be71754-9abd-4237-8102-c7741cf9b843.json deleted file mode 100644 index c465a6a9d..000000000 --- a/src/test/resources/sqlexecapi/metadatanullresolutiontests/testgetcrossreference_nullparenttablereturnsempty/mappings/api_2.0_sql_sessions-6be71754-9abd-4237-8102-c7741cf9b843.json +++ /dev/null @@ -1,38 +0,0 @@ -{ - "id" : "6be71754-9abd-4237-8102-c7741cf9b843", - "name" : "api_2.0_sql_sessions", - "request" : { - "url" : "/api/2.0/sql/sessions/", - "method" : "POST", - "bodyPatterns" : [ { - "equalToJson" : "{\"warehouse_id\":\"dd43ee29fedd958d\",\"schema\":\"default\",\"catalog\":\"SPARK\"}", - "ignoreArrayOrder" : true, - "ignoreExtraElements" : true - } ] - }, - "response" : { - "status" : 200, - "body" : "{\"session_id\":\"01f13244-8ece-1961-a7f5-afc8f666dc7c\"}", - "headers" : { - "x-request-id" : "25815af4-3f9e-4db5-a5e2-6f055ad20833", - "date" : "Tue, 7 Apr 2026 05:42:26 GMT", - "server" : "databricks", - "x-databricks-popp-response-code-details" : "via_upstream", - "x-databricks-shard-debug" : "oregon-staging", - "vary" : "Accept-Encoding", - "x-databricks-popp-fast-path-routing-reason" : "not_eligible", - "x-databricks-popp-shadow-routing-reason" : "spog-domain-checker-false", - "x-databricks-upstream-cluster" : 
"oregon-staging-h2", - "x-databricks-org-id" : "6051921418418893", - "strict-transport-security" : "max-age=31536000; includeSubDomains; preload", - "x-content-type-options" : "nosniff", - "x-databricks-popp-routing-reason" : "deployment-name", - "content-type" : "application/json", - "server-timing" : "request_id;dur=0;desc=\"25815af4-3f9e-4db5-a5e2-6f055ad20833\", client_protocol;dur=0;desc=\"HTTP/1.1\"", - "alt-svc" : "h3=\":443\"; ma=86400, h3-29=\":443\"; ma=86400", - "x-databricks-apiproxy-response-code-details" : "via_upstream" - } - }, - "uuid" : "6be71754-9abd-4237-8102-c7741cf9b843", - "insertionIndex" : 88 -} \ No newline at end of file diff --git a/src/test/resources/sqlexecapi/metadatanullresolutiontests/testgetcrossreference_nullparenttablereturnsempty/mappings/api_2.0_sql_sessions_01f13244-8ece-1961-a7f5-afc8f666dc7c-c0e71802-84ee-4dd7-8d24-960e141ced7c.json b/src/test/resources/sqlexecapi/metadatanullresolutiontests/testgetcrossreference_nullparenttablereturnsempty/mappings/api_2.0_sql_sessions_01f13244-8ece-1961-a7f5-afc8f666dc7c-c0e71802-84ee-4dd7-8d24-960e141ced7c.json deleted file mode 100644 index 788cac600..000000000 --- a/src/test/resources/sqlexecapi/metadatanullresolutiontests/testgetcrossreference_nullparenttablereturnsempty/mappings/api_2.0_sql_sessions_01f13244-8ece-1961-a7f5-afc8f666dc7c-c0e71802-84ee-4dd7-8d24-960e141ced7c.json +++ /dev/null @@ -1,32 +0,0 @@ -{ - "id" : "c0e71802-84ee-4dd7-8d24-960e141ced7c", - "name" : "api_2.0_sql_sessions_01f13244-8ece-1961-a7f5-afc8f666dc7c", - "request" : { - "url" : "/api/2.0/sql/sessions/01f13244-8ece-1961-a7f5-afc8f666dc7c?warehouse_id=dd43ee29fedd958d", - "method" : "DELETE" - }, - "response" : { - "status" : 200, - "body" : "{}", - "headers" : { - "x-request-id" : "32a354b9-c4a0-410a-9a9b-7fc8ac973a0e", - "date" : "Tue, 7 Apr 2026 05:42:27 GMT", - "server" : "databricks", - "x-databricks-popp-response-code-details" : "via_upstream", - "x-databricks-shard-debug" : "oregon-staging", - 
"x-databricks-popp-fast-path-routing-reason" : "not_eligible", - "x-databricks-popp-shadow-routing-reason" : "spog-domain-checker-false", - "x-databricks-upstream-cluster" : "oregon-staging-h2", - "x-databricks-org-id" : "6051921418418893", - "strict-transport-security" : "max-age=31536000; includeSubDomains; preload", - "x-content-type-options" : "nosniff", - "x-databricks-popp-routing-reason" : "deployment-name", - "content-type" : "application/json", - "server-timing" : "request_id;dur=0;desc=\"32a354b9-c4a0-410a-9a9b-7fc8ac973a0e\", client_protocol;dur=0;desc=\"HTTP/1.1\"", - "alt-svc" : "h3=\":443\"; ma=86400, h3-29=\":443\"; ma=86400", - "x-databricks-apiproxy-response-code-details" : "via_upstream" - } - }, - "uuid" : "c0e71802-84ee-4dd7-8d24-960e141ced7c", - "insertionIndex" : 87 -} \ No newline at end of file diff --git a/src/test/resources/sqlexecapi/metadatanullresolutiontests/testgetcrossreference_nullparenttablereturnsempty/mappings/oidc_.well-known_oauth-authorization-server-2a66b8b2-d6c3-4d3f-96c1-2f1423e22f7f.json b/src/test/resources/sqlexecapi/metadatanullresolutiontests/testgetcrossreference_nullparenttablereturnsempty/mappings/oidc_.well-known_oauth-authorization-server-2a66b8b2-d6c3-4d3f-96c1-2f1423e22f7f.json deleted file mode 100644 index f6c3fbc65..000000000 --- a/src/test/resources/sqlexecapi/metadatanullresolutiontests/testgetcrossreference_nullparenttablereturnsempty/mappings/oidc_.well-known_oauth-authorization-server-2a66b8b2-d6c3-4d3f-96c1-2f1423e22f7f.json +++ /dev/null @@ -1,33 +0,0 @@ -{ - "id" : "2a66b8b2-d6c3-4d3f-96c1-2f1423e22f7f", - "name" : "oidc_.well-known_oauth-authorization-server", - "request" : { - "url" : "/oidc/.well-known/oauth-authorization-server", - "method" : "GET" - }, - "response" : { - "status" : 200, - "body" : 
"{\"authorization_endpoint\":\"https:\\/\\/e2-dogfood.staging.cloud.databricks.com\\/oidc\\/v1\\/authorize\",\"token_endpoint\":\"https:\\/\\/e2-dogfood.staging.cloud.databricks.com\\/oidc\\/v1\\/token\",\"issuer\":\"https:\\/\\/e2-dogfood.staging.cloud.databricks.com\\/oidc\",\"jwks_uri\":\"https:\\/\\/oregon.staging.cloud.databricks.com\\/oidc\\/jwks.json\",\"scopes_supported\":[\"access-management\",\"alerts\",\"all-apis\",\"apps\",\"authentication\",\"billing\",\"cleanrooms\",\"clusters\",\"command-execution\",\"custom-llms\",\"dashboards\",\"dataclassification\",\"dataquality\",\"email\",\"environments\",\"files\",\"forecasting\",\"genie\",\"global-init-scripts\",\"identity\",\"instance-pools\",\"instance-profiles\",\"jobs\",\"knowledge-assistants\",\"libraries\",\"marketplace\",\"mlflow\",\"model-serving\",\"networking\",\"notifications\",\"offline_access\",\"openid\",\"pipelines\",\"postgres\",\"profile\",\"provisioning\",\"qualitymonitor\",\"query-history\",\"scim\",\"secrets\",\"settings\",\"sharing\",\"sql\",\"tags\",\"unity-catalog\",\"vector-search\",\"workspace\"],\"response_types_supported\":[\"code\",\"id_token\"],\"response_modes_supported\":[\"query\",\"fragment\",\"form_post\"],\"grant_types_supported\":[\"client_credentials\",\"authorization_code\",\"refresh_token\"],\"code_challenge_methods_supported\":[\"S256\"],\"token_endpoint_auth_methods_supported\":[\"client_secret_basic\",\"client_secret_post\",\"none\"],\"subject_types_supported\":[\"public\"],\"id_token_signing_alg_values_supported\":[\"RS256\"],\"claims_supported\":[\"iss\",\"sub\",\"aud\",\"iat\",\"exp\",\"jti\",\"name\",\"family_name\",\"given_name\",\"preferred_username\"],\"request_uri_parameter_supported\":false}", - "headers" : { - "x-request-id" : "8afb654a-89db-492d-aa5e-23b5bd1e7b8f", - "date" : "Tue, 7 Apr 2026 05:42:24 GMT", - "server" : "databricks", - "x-databricks-popp-response-code-details" : "via_upstream", - "x-databricks-shard-debug" : "oregon-staging", - "vary" : 
"Accept-Encoding", - "x-databricks-popp-fast-path-routing-reason" : "not_eligible", - "x-databricks-popp-shadow-routing-reason" : "spog-domain-checker-false", - "x-databricks-upstream-cluster" : "oregon-staging-h2", - "x-databricks-org-id" : "6051921418418893", - "strict-transport-security" : "max-age=31536000; includeSubDomains; preload", - "x-content-type-options" : "nosniff", - "x-databricks-popp-routing-reason" : "deployment-name", - "content-type" : "application/json; charset=UTF-8", - "server-timing" : "request_id;dur=0;desc=\"8afb654a-89db-492d-aa5e-23b5bd1e7b8f\", client_protocol;dur=0;desc=\"HTTP/1.1\"", - "alt-svc" : "h3=\":443\"; ma=86400, h3-29=\":443\"; ma=86400", - "x-databricks-apiproxy-response-code-details" : "via_upstream" - } - }, - "uuid" : "2a66b8b2-d6c3-4d3f-96c1-2f1423e22f7f", - "insertionIndex" : 89 -} \ No newline at end of file diff --git a/src/test/resources/sqlexecapi/metadatanullresolutiontests/testgetimportedkeys_nulltablereturnsempty/mappings/api_2.0_sql_sessions-d9dac8f1-3e37-4114-afee-263cb645c84b.json b/src/test/resources/sqlexecapi/metadatanullresolutiontests/testgetimportedkeys_nulltablereturnsempty/mappings/api_2.0_sql_sessions-d9dac8f1-3e37-4114-afee-263cb645c84b.json deleted file mode 100644 index 8ea7611e5..000000000 --- a/src/test/resources/sqlexecapi/metadatanullresolutiontests/testgetimportedkeys_nulltablereturnsempty/mappings/api_2.0_sql_sessions-d9dac8f1-3e37-4114-afee-263cb645c84b.json +++ /dev/null @@ -1,38 +0,0 @@ -{ - "id" : "d9dac8f1-3e37-4114-afee-263cb645c84b", - "name" : "api_2.0_sql_sessions", - "request" : { - "url" : "/api/2.0/sql/sessions/", - "method" : "POST", - "bodyPatterns" : [ { - "equalToJson" : "{\"warehouse_id\":\"dd43ee29fedd958d\",\"schema\":\"default\",\"catalog\":\"SPARK\"}", - "ignoreArrayOrder" : true, - "ignoreExtraElements" : true - } ] - }, - "response" : { - "status" : 200, - "body" : "{\"session_id\":\"01f13244-7551-1c1e-a667-ba29671270b0\"}", - "headers" : { - "x-request-id" : 
"f0837cfd-2058-494a-8b50-f179b1426f29", - "date" : "Tue, 7 Apr 2026 05:41:43 GMT", - "server" : "databricks", - "x-databricks-popp-response-code-details" : "via_upstream", - "x-databricks-shard-debug" : "oregon-staging", - "vary" : "Accept-Encoding", - "x-databricks-popp-fast-path-routing-reason" : "not_eligible", - "x-databricks-popp-shadow-routing-reason" : "spog-domain-checker-false", - "x-databricks-upstream-cluster" : "oregon-staging-h2", - "x-databricks-org-id" : "6051921418418893", - "strict-transport-security" : "max-age=31536000; includeSubDomains; preload", - "x-content-type-options" : "nosniff", - "x-databricks-popp-routing-reason" : "deployment-name", - "content-type" : "application/json", - "server-timing" : "request_id;dur=0;desc=\"f0837cfd-2058-494a-8b50-f179b1426f29\", client_protocol;dur=0;desc=\"HTTP/1.1\"", - "alt-svc" : "h3=\":443\"; ma=86400, h3-29=\":443\"; ma=86400", - "x-databricks-apiproxy-response-code-details" : "via_upstream" - } - }, - "uuid" : "d9dac8f1-3e37-4114-afee-263cb645c84b", - "insertionIndex" : 56 -} \ No newline at end of file diff --git a/src/test/resources/sqlexecapi/metadatanullresolutiontests/testgetimportedkeys_nulltablereturnsempty/mappings/api_2.0_sql_sessions_01f13244-7551-1c1e-a667-ba29671270b0-9e677901-7b79-4024-9bcd-046d454257d6.json b/src/test/resources/sqlexecapi/metadatanullresolutiontests/testgetimportedkeys_nulltablereturnsempty/mappings/api_2.0_sql_sessions_01f13244-7551-1c1e-a667-ba29671270b0-9e677901-7b79-4024-9bcd-046d454257d6.json deleted file mode 100644 index 260448615..000000000 --- a/src/test/resources/sqlexecapi/metadatanullresolutiontests/testgetimportedkeys_nulltablereturnsempty/mappings/api_2.0_sql_sessions_01f13244-7551-1c1e-a667-ba29671270b0-9e677901-7b79-4024-9bcd-046d454257d6.json +++ /dev/null @@ -1,32 +0,0 @@ -{ - "id" : "9e677901-7b79-4024-9bcd-046d454257d6", - "name" : "api_2.0_sql_sessions_01f13244-7551-1c1e-a667-ba29671270b0", - "request" : { - "url" : 
"/api/2.0/sql/sessions/01f13244-7551-1c1e-a667-ba29671270b0?warehouse_id=dd43ee29fedd958d", - "method" : "DELETE" - }, - "response" : { - "status" : 200, - "body" : "{}", - "headers" : { - "x-request-id" : "39f63054-7f43-45d9-8603-20fdba70b2e8", - "date" : "Tue, 7 Apr 2026 05:41:44 GMT", - "server" : "databricks", - "x-databricks-popp-response-code-details" : "via_upstream", - "x-databricks-shard-debug" : "oregon-staging", - "x-databricks-popp-fast-path-routing-reason" : "not_eligible", - "x-databricks-popp-shadow-routing-reason" : "spog-domain-checker-false", - "x-databricks-upstream-cluster" : "oregon-staging-h2", - "x-databricks-org-id" : "6051921418418893", - "strict-transport-security" : "max-age=31536000; includeSubDomains; preload", - "x-content-type-options" : "nosniff", - "x-databricks-popp-routing-reason" : "deployment-name", - "content-type" : "application/json", - "server-timing" : "request_id;dur=0;desc=\"39f63054-7f43-45d9-8603-20fdba70b2e8\", client_protocol;dur=0;desc=\"HTTP/1.1\"", - "alt-svc" : "h3=\":443\"; ma=86400, h3-29=\":443\"; ma=86400", - "x-databricks-apiproxy-response-code-details" : "via_upstream" - } - }, - "uuid" : "9e677901-7b79-4024-9bcd-046d454257d6", - "insertionIndex" : 55 -} \ No newline at end of file diff --git a/src/test/resources/sqlexecapi/metadatanullresolutiontests/testgetimportedkeys_nulltablereturnsempty/mappings/oidc_.well-known_oauth-authorization-server-b5c5ad7e-3125-485f-8681-ca5c96f75b82.json b/src/test/resources/sqlexecapi/metadatanullresolutiontests/testgetimportedkeys_nulltablereturnsempty/mappings/oidc_.well-known_oauth-authorization-server-b5c5ad7e-3125-485f-8681-ca5c96f75b82.json deleted file mode 100644 index 884108b21..000000000 --- a/src/test/resources/sqlexecapi/metadatanullresolutiontests/testgetimportedkeys_nulltablereturnsempty/mappings/oidc_.well-known_oauth-authorization-server-b5c5ad7e-3125-485f-8681-ca5c96f75b82.json +++ /dev/null @@ -1,33 +0,0 @@ -{ - "id" : "b5c5ad7e-3125-485f-8681-ca5c96f75b82", 
- "name" : "oidc_.well-known_oauth-authorization-server", - "request" : { - "url" : "/oidc/.well-known/oauth-authorization-server", - "method" : "GET" - }, - "response" : { - "status" : 200, - "body" : "{\"authorization_endpoint\":\"https:\\/\\/e2-dogfood.staging.cloud.databricks.com\\/oidc\\/v1\\/authorize\",\"token_endpoint\":\"https:\\/\\/e2-dogfood.staging.cloud.databricks.com\\/oidc\\/v1\\/token\",\"issuer\":\"https:\\/\\/e2-dogfood.staging.cloud.databricks.com\\/oidc\",\"jwks_uri\":\"https:\\/\\/oregon.staging.cloud.databricks.com\\/oidc\\/jwks.json\",\"scopes_supported\":[\"access-management\",\"alerts\",\"all-apis\",\"apps\",\"authentication\",\"billing\",\"cleanrooms\",\"clusters\",\"command-execution\",\"custom-llms\",\"dashboards\",\"dataclassification\",\"dataquality\",\"email\",\"environments\",\"files\",\"forecasting\",\"genie\",\"global-init-scripts\",\"identity\",\"instance-pools\",\"instance-profiles\",\"jobs\",\"knowledge-assistants\",\"libraries\",\"marketplace\",\"mlflow\",\"model-serving\",\"networking\",\"notifications\",\"offline_access\",\"openid\",\"pipelines\",\"postgres\",\"profile\",\"provisioning\",\"qualitymonitor\",\"query-history\",\"scim\",\"secrets\",\"settings\",\"sharing\",\"sql\",\"tags\",\"unity-catalog\",\"vector-search\",\"workspace\"],\"response_types_supported\":[\"code\",\"id_token\"],\"response_modes_supported\":[\"query\",\"fragment\",\"form_post\"],\"grant_types_supported\":[\"client_credentials\",\"authorization_code\",\"refresh_token\"],\"code_challenge_methods_supported\":[\"S256\"],\"token_endpoint_auth_methods_supported\":[\"client_secret_basic\",\"client_secret_post\",\"none\"],\"subject_types_supported\":[\"public\"],\"id_token_signing_alg_values_supported\":[\"RS256\"],\"claims_supported\":[\"iss\",\"sub\",\"aud\",\"iat\",\"exp\",\"jti\",\"name\",\"family_name\",\"given_name\",\"preferred_username\"],\"request_uri_parameter_supported\":false}", - "headers" : { - "x-request-id" : 
"a6a01f06-a7ae-49ec-af8f-53c8a34d3195", - "date" : "Tue, 7 Apr 2026 05:41:42 GMT", - "server" : "databricks", - "x-databricks-popp-response-code-details" : "via_upstream", - "x-databricks-shard-debug" : "oregon-staging", - "vary" : "Accept-Encoding", - "x-databricks-popp-fast-path-routing-reason" : "not_eligible", - "x-databricks-popp-shadow-routing-reason" : "spog-domain-checker-false", - "x-databricks-upstream-cluster" : "oregon-staging-h2", - "x-databricks-org-id" : "6051921418418893", - "strict-transport-security" : "max-age=31536000; includeSubDomains; preload", - "x-content-type-options" : "nosniff", - "x-databricks-popp-routing-reason" : "deployment-name", - "content-type" : "application/json; charset=UTF-8", - "server-timing" : "request_id;dur=0;desc=\"a6a01f06-a7ae-49ec-af8f-53c8a34d3195\", client_protocol;dur=0;desc=\"HTTP/1.1\"", - "alt-svc" : "h3=\":443\"; ma=86400, h3-29=\":443\"; ma=86400", - "x-databricks-apiproxy-response-code-details" : "via_upstream" - } - }, - "uuid" : "b5c5ad7e-3125-485f-8681-ca5c96f75b82", - "insertionIndex" : 57 -} \ No newline at end of file diff --git a/src/test/resources/sqlexecapi/metadatanullresolutiontests/testgetprimarykeys_explicitcatalognullschemareturnsempty/mappings/api_2.0_sql_sessions-9bd52ee2-dd6e-457c-afc6-b1f09aa16415.json b/src/test/resources/sqlexecapi/metadatanullresolutiontests/testgetprimarykeys_explicitcatalognullschemareturnsempty/mappings/api_2.0_sql_sessions-9bd52ee2-dd6e-457c-afc6-b1f09aa16415.json deleted file mode 100644 index ef8e6a8a0..000000000 --- a/src/test/resources/sqlexecapi/metadatanullresolutiontests/testgetprimarykeys_explicitcatalognullschemareturnsempty/mappings/api_2.0_sql_sessions-9bd52ee2-dd6e-457c-afc6-b1f09aa16415.json +++ /dev/null @@ -1,38 +0,0 @@ -{ - "id" : "9bd52ee2-dd6e-457c-afc6-b1f09aa16415", - "name" : "api_2.0_sql_sessions", - "request" : { - "url" : "/api/2.0/sql/sessions/", - "method" : "POST", - "bodyPatterns" : [ { - "equalToJson" : 
"{\"warehouse_id\":\"dd43ee29fedd958d\",\"schema\":\"default\",\"catalog\":\"SPARK\"}", - "ignoreArrayOrder" : true, - "ignoreExtraElements" : true - } ] - }, - "response" : { - "status" : 200, - "body" : "{\"session_id\":\"01f13244-688e-173e-bfa9-59c2a0b7c7a5\"}", - "headers" : { - "x-request-id" : "ef54b6d2-79b5-4133-a63a-a4a90d636140", - "date" : "Tue, 7 Apr 2026 05:41:21 GMT", - "server" : "databricks", - "x-databricks-popp-response-code-details" : "via_upstream", - "x-databricks-shard-debug" : "oregon-staging", - "vary" : "Accept-Encoding", - "x-databricks-popp-fast-path-routing-reason" : "not_eligible", - "x-databricks-popp-shadow-routing-reason" : "spog-domain-checker-false", - "x-databricks-upstream-cluster" : "oregon-staging-h2", - "x-databricks-org-id" : "6051921418418893", - "strict-transport-security" : "max-age=31536000; includeSubDomains; preload", - "x-content-type-options" : "nosniff", - "x-databricks-popp-routing-reason" : "deployment-name", - "content-type" : "application/json", - "server-timing" : "request_id;dur=0;desc=\"ef54b6d2-79b5-4133-a63a-a4a90d636140\", client_protocol;dur=0;desc=\"HTTP/1.1\"", - "alt-svc" : "h3=\":443\"; ma=86400, h3-29=\":443\"; ma=86400", - "x-databricks-apiproxy-response-code-details" : "via_upstream" - } - }, - "uuid" : "9bd52ee2-dd6e-457c-afc6-b1f09aa16415", - "insertionIndex" : 38 -} \ No newline at end of file diff --git a/src/test/resources/sqlexecapi/metadatanullresolutiontests/testgetprimarykeys_explicitcatalognullschemareturnsempty/mappings/api_2.0_sql_sessions_01f13244-688e-173e-bfa9-59c2a0b7c7a5-576ed39a-dbec-47ec-b115-b0824622b8c2.json b/src/test/resources/sqlexecapi/metadatanullresolutiontests/testgetprimarykeys_explicitcatalognullschemareturnsempty/mappings/api_2.0_sql_sessions_01f13244-688e-173e-bfa9-59c2a0b7c7a5-576ed39a-dbec-47ec-b115-b0824622b8c2.json deleted file mode 100644 index c11fec8a1..000000000 --- 
a/src/test/resources/sqlexecapi/metadatanullresolutiontests/testgetprimarykeys_explicitcatalognullschemareturnsempty/mappings/api_2.0_sql_sessions_01f13244-688e-173e-bfa9-59c2a0b7c7a5-576ed39a-dbec-47ec-b115-b0824622b8c2.json +++ /dev/null @@ -1,32 +0,0 @@ -{ - "id" : "576ed39a-dbec-47ec-b115-b0824622b8c2", - "name" : "api_2.0_sql_sessions_01f13244-688e-173e-bfa9-59c2a0b7c7a5", - "request" : { - "url" : "/api/2.0/sql/sessions/01f13244-688e-173e-bfa9-59c2a0b7c7a5?warehouse_id=dd43ee29fedd958d", - "method" : "DELETE" - }, - "response" : { - "status" : 200, - "body" : "{}", - "headers" : { - "x-request-id" : "e7dbe7c2-4ef6-4a70-9cf0-13d10c9e6075", - "date" : "Tue, 7 Apr 2026 05:41:22 GMT", - "server" : "databricks", - "x-databricks-popp-response-code-details" : "via_upstream", - "x-databricks-shard-debug" : "oregon-staging", - "x-databricks-popp-fast-path-routing-reason" : "not_eligible", - "x-databricks-popp-shadow-routing-reason" : "spog-domain-checker-false", - "x-databricks-upstream-cluster" : "oregon-staging-h2", - "x-databricks-org-id" : "6051921418418893", - "strict-transport-security" : "max-age=31536000; includeSubDomains; preload", - "x-content-type-options" : "nosniff", - "x-databricks-popp-routing-reason" : "deployment-name", - "content-type" : "application/json", - "server-timing" : "request_id;dur=0;desc=\"e7dbe7c2-4ef6-4a70-9cf0-13d10c9e6075\", client_protocol;dur=0;desc=\"HTTP/1.1\"", - "alt-svc" : "h3=\":443\"; ma=86400, h3-29=\":443\"; ma=86400", - "x-databricks-apiproxy-response-code-details" : "via_upstream" - } - }, - "uuid" : "576ed39a-dbec-47ec-b115-b0824622b8c2", - "insertionIndex" : 37 -} \ No newline at end of file diff --git a/src/test/resources/sqlexecapi/metadatanullresolutiontests/testgetprimarykeys_explicitcatalognullschemareturnsempty/mappings/oidc_.well-known_oauth-authorization-server-0074dd8f-377f-4638-8f8c-2b1caa6197ba.json 
b/src/test/resources/sqlexecapi/metadatanullresolutiontests/testgetprimarykeys_explicitcatalognullschemareturnsempty/mappings/oidc_.well-known_oauth-authorization-server-0074dd8f-377f-4638-8f8c-2b1caa6197ba.json deleted file mode 100644 index 0a7806636..000000000 --- a/src/test/resources/sqlexecapi/metadatanullresolutiontests/testgetprimarykeys_explicitcatalognullschemareturnsempty/mappings/oidc_.well-known_oauth-authorization-server-0074dd8f-377f-4638-8f8c-2b1caa6197ba.json +++ /dev/null @@ -1,33 +0,0 @@ -{ - "id" : "0074dd8f-377f-4638-8f8c-2b1caa6197ba", - "name" : "oidc_.well-known_oauth-authorization-server", - "request" : { - "url" : "/oidc/.well-known/oauth-authorization-server", - "method" : "GET" - }, - "response" : { - "status" : 200, - "body" : "{\"authorization_endpoint\":\"https:\\/\\/e2-dogfood.staging.cloud.databricks.com\\/oidc\\/v1\\/authorize\",\"token_endpoint\":\"https:\\/\\/e2-dogfood.staging.cloud.databricks.com\\/oidc\\/v1\\/token\",\"issuer\":\"https:\\/\\/e2-dogfood.staging.cloud.databricks.com\\/oidc\",\"jwks_uri\":\"https:\\/\\/oregon.staging.cloud.databricks.com\\/oidc\\/jwks.json\",\"scopes_supported\":[\"access-management\",\"alerts\",\"all-apis\",\"apps\",\"authentication\",\"billing\",\"cleanrooms\",\"clusters\",\"command-execution\",\"custom-llms\",\"dashboards\",\"dataclassification\",\"dataquality\",\"email\",\"environments\",\"files\",\"forecasting\",\"genie\",\"global-init-scripts\",\"identity\",\"instance-pools\",\"instance-profiles\",\"jobs\",\"knowledge-assistants\",\"libraries\",\"marketplace\",\"mlflow\",\"model-serving\",\"networking\",\"notifications\",\"offline_access\",\"openid\",\"pipelines\",\"postgres\",\"profile\",\"provisioning\",\"qualitymonitor\",\"query-history\",\"scim\",\"secrets\",\"settings\",\"sharing\",\"sql\",\"tags\",\"unity-catalog\",\"vector-search\",\"workspace\"],\"response_types_supported\":[\"code\",\"id_token\"],\"response_modes_supported\":[\"query\",\"fragment\",\"form_post\"],\"grant_types_suppor
ted\":[\"client_credentials\",\"authorization_code\",\"refresh_token\"],\"code_challenge_methods_supported\":[\"S256\"],\"token_endpoint_auth_methods_supported\":[\"client_secret_basic\",\"client_secret_post\",\"none\"],\"subject_types_supported\":[\"public\"],\"id_token_signing_alg_values_supported\":[\"RS256\"],\"claims_supported\":[\"iss\",\"sub\",\"aud\",\"iat\",\"exp\",\"jti\",\"name\",\"family_name\",\"given_name\",\"preferred_username\"],\"request_uri_parameter_supported\":false}", - "headers" : { - "x-request-id" : "0cb4069c-4008-4f9c-b91a-fedf64e2cda2", - "date" : "Tue, 7 Apr 2026 05:41:20 GMT", - "server" : "databricks", - "x-databricks-popp-response-code-details" : "via_upstream", - "x-databricks-shard-debug" : "oregon-staging", - "vary" : "Accept-Encoding", - "x-databricks-popp-fast-path-routing-reason" : "not_eligible", - "x-databricks-popp-shadow-routing-reason" : "spog-domain-checker-false", - "x-databricks-upstream-cluster" : "oregon-staging-h2", - "x-databricks-org-id" : "6051921418418893", - "strict-transport-security" : "max-age=31536000; includeSubDomains; preload", - "x-content-type-options" : "nosniff", - "x-databricks-popp-routing-reason" : "deployment-name", - "content-type" : "application/json; charset=UTF-8", - "server-timing" : "request_id;dur=0;desc=\"0cb4069c-4008-4f9c-b91a-fedf64e2cda2\", client_protocol;dur=0;desc=\"HTTP/1.1\"", - "alt-svc" : "h3=\":443\"; ma=86400, h3-29=\":443\"; ma=86400", - "x-databricks-apiproxy-response-code-details" : "via_upstream" - } - }, - "uuid" : "0074dd8f-377f-4638-8f8c-2b1caa6197ba", - "insertionIndex" : 39 -} \ No newline at end of file diff --git a/src/test/resources/sqlexecapi/metadatanullresolutiontests/testgetprimarykeys_nulltablereturnsempty/mappings/api_2.0_sql_sessions-c648ba7b-ad27-45a7-8460-79196838f5bc.json b/src/test/resources/sqlexecapi/metadatanullresolutiontests/testgetprimarykeys_nulltablereturnsempty/mappings/api_2.0_sql_sessions-c648ba7b-ad27-45a7-8460-79196838f5bc.json deleted file 
mode 100644 index d20818fd5..000000000 --- a/src/test/resources/sqlexecapi/metadatanullresolutiontests/testgetprimarykeys_nulltablereturnsempty/mappings/api_2.0_sql_sessions-c648ba7b-ad27-45a7-8460-79196838f5bc.json +++ /dev/null @@ -1,38 +0,0 @@ -{ - "id" : "c648ba7b-ad27-45a7-8460-79196838f5bc", - "name" : "api_2.0_sql_sessions", - "request" : { - "url" : "/api/2.0/sql/sessions/", - "method" : "POST", - "bodyPatterns" : [ { - "equalToJson" : "{\"warehouse_id\":\"dd43ee29fedd958d\",\"schema\":\"default\",\"catalog\":\"SPARK\"}", - "ignoreArrayOrder" : true, - "ignoreExtraElements" : true - } ] - }, - "response" : { - "status" : 200, - "body" : "{\"session_id\":\"01f13244-669b-1591-9c90-d6040723b4e3\"}", - "headers" : { - "x-request-id" : "42321d7d-b2c8-496f-a291-86f9e45fdca5", - "date" : "Tue, 7 Apr 2026 05:41:18 GMT", - "server" : "databricks", - "x-databricks-popp-response-code-details" : "via_upstream", - "x-databricks-shard-debug" : "oregon-staging", - "vary" : "Accept-Encoding", - "x-databricks-popp-fast-path-routing-reason" : "not_eligible", - "x-databricks-popp-shadow-routing-reason" : "spog-domain-checker-false", - "x-databricks-upstream-cluster" : "oregon-staging-h2", - "x-databricks-org-id" : "6051921418418893", - "strict-transport-security" : "max-age=31536000; includeSubDomains; preload", - "x-content-type-options" : "nosniff", - "x-databricks-popp-routing-reason" : "deployment-name", - "content-type" : "application/json", - "server-timing" : "request_id;dur=0;desc=\"42321d7d-b2c8-496f-a291-86f9e45fdca5\", client_protocol;dur=0;desc=\"HTTP/1.1\"", - "alt-svc" : "h3=\":443\"; ma=86400, h3-29=\":443\"; ma=86400", - "x-databricks-apiproxy-response-code-details" : "via_upstream" - } - }, - "uuid" : "c648ba7b-ad27-45a7-8460-79196838f5bc", - "insertionIndex" : 34 -} \ No newline at end of file diff --git 
a/src/test/resources/sqlexecapi/metadatanullresolutiontests/testgetprimarykeys_nulltablereturnsempty/mappings/api_2.0_sql_sessions_01f13244-669b-1591-9c90-d6040723b4e3-f15603aa-870c-4520-910b-9e544c3bdbd5.json b/src/test/resources/sqlexecapi/metadatanullresolutiontests/testgetprimarykeys_nulltablereturnsempty/mappings/api_2.0_sql_sessions_01f13244-669b-1591-9c90-d6040723b4e3-f15603aa-870c-4520-910b-9e544c3bdbd5.json deleted file mode 100644 index 075bb6a61..000000000 --- a/src/test/resources/sqlexecapi/metadatanullresolutiontests/testgetprimarykeys_nulltablereturnsempty/mappings/api_2.0_sql_sessions_01f13244-669b-1591-9c90-d6040723b4e3-f15603aa-870c-4520-910b-9e544c3bdbd5.json +++ /dev/null @@ -1,32 +0,0 @@ -{ - "id" : "f15603aa-870c-4520-910b-9e544c3bdbd5", - "name" : "api_2.0_sql_sessions_01f13244-669b-1591-9c90-d6040723b4e3", - "request" : { - "url" : "/api/2.0/sql/sessions/01f13244-669b-1591-9c90-d6040723b4e3?warehouse_id=dd43ee29fedd958d", - "method" : "DELETE" - }, - "response" : { - "status" : 200, - "body" : "{}", - "headers" : { - "x-request-id" : "08885b6d-4ba7-4ff6-9e24-5f41034abbe8", - "date" : "Tue, 7 Apr 2026 05:41:19 GMT", - "server" : "databricks", - "x-databricks-popp-response-code-details" : "via_upstream", - "x-databricks-shard-debug" : "oregon-staging", - "x-databricks-popp-fast-path-routing-reason" : "not_eligible", - "x-databricks-popp-shadow-routing-reason" : "spog-domain-checker-false", - "x-databricks-upstream-cluster" : "oregon-staging-h2", - "x-databricks-org-id" : "6051921418418893", - "strict-transport-security" : "max-age=31536000; includeSubDomains; preload", - "x-content-type-options" : "nosniff", - "x-databricks-popp-routing-reason" : "deployment-name", - "content-type" : "application/json", - "server-timing" : "request_id;dur=0;desc=\"08885b6d-4ba7-4ff6-9e24-5f41034abbe8\", client_protocol;dur=0;desc=\"HTTP/1.1\"", - "alt-svc" : "h3=\":443\"; ma=86400, h3-29=\":443\"; ma=86400", - "x-databricks-apiproxy-response-code-details" : 
"via_upstream" - } - }, - "uuid" : "f15603aa-870c-4520-910b-9e544c3bdbd5", - "insertionIndex" : 33 -} \ No newline at end of file diff --git a/src/test/resources/sqlexecapi/metadatanullresolutiontests/testgetprimarykeys_nulltablereturnsempty/mappings/oidc_.well-known_oauth-authorization-server-031f9c97-c01c-43cd-afb9-0bc2f5ca729b.json b/src/test/resources/sqlexecapi/metadatanullresolutiontests/testgetprimarykeys_nulltablereturnsempty/mappings/oidc_.well-known_oauth-authorization-server-031f9c97-c01c-43cd-afb9-0bc2f5ca729b.json deleted file mode 100644 index 7c3e90eeb..000000000 --- a/src/test/resources/sqlexecapi/metadatanullresolutiontests/testgetprimarykeys_nulltablereturnsempty/mappings/oidc_.well-known_oauth-authorization-server-031f9c97-c01c-43cd-afb9-0bc2f5ca729b.json +++ /dev/null @@ -1,33 +0,0 @@ -{ - "id" : "031f9c97-c01c-43cd-afb9-0bc2f5ca729b", - "name" : "oidc_.well-known_oauth-authorization-server", - "request" : { - "url" : "/oidc/.well-known/oauth-authorization-server", - "method" : "GET" - }, - "response" : { - "status" : 200, - "body" : 
"{\"authorization_endpoint\":\"https:\\/\\/e2-dogfood.staging.cloud.databricks.com\\/oidc\\/v1\\/authorize\",\"token_endpoint\":\"https:\\/\\/e2-dogfood.staging.cloud.databricks.com\\/oidc\\/v1\\/token\",\"issuer\":\"https:\\/\\/e2-dogfood.staging.cloud.databricks.com\\/oidc\",\"jwks_uri\":\"https:\\/\\/oregon.staging.cloud.databricks.com\\/oidc\\/jwks.json\",\"scopes_supported\":[\"access-management\",\"alerts\",\"all-apis\",\"apps\",\"authentication\",\"billing\",\"cleanrooms\",\"clusters\",\"command-execution\",\"custom-llms\",\"dashboards\",\"dataclassification\",\"dataquality\",\"email\",\"environments\",\"files\",\"forecasting\",\"genie\",\"global-init-scripts\",\"identity\",\"instance-pools\",\"instance-profiles\",\"jobs\",\"knowledge-assistants\",\"libraries\",\"marketplace\",\"mlflow\",\"model-serving\",\"networking\",\"notifications\",\"offline_access\",\"openid\",\"pipelines\",\"postgres\",\"profile\",\"provisioning\",\"qualitymonitor\",\"query-history\",\"scim\",\"secrets\",\"settings\",\"sharing\",\"sql\",\"tags\",\"unity-catalog\",\"vector-search\",\"workspace\"],\"response_types_supported\":[\"code\",\"id_token\"],\"response_modes_supported\":[\"query\",\"fragment\",\"form_post\"],\"grant_types_supported\":[\"client_credentials\",\"authorization_code\",\"refresh_token\"],\"code_challenge_methods_supported\":[\"S256\"],\"token_endpoint_auth_methods_supported\":[\"client_secret_basic\",\"client_secret_post\",\"none\"],\"subject_types_supported\":[\"public\"],\"id_token_signing_alg_values_supported\":[\"RS256\"],\"claims_supported\":[\"iss\",\"sub\",\"aud\",\"iat\",\"exp\",\"jti\",\"name\",\"family_name\",\"given_name\",\"preferred_username\"],\"request_uri_parameter_supported\":false}", - "headers" : { - "x-request-id" : "ff05cc42-2265-4b0e-a74a-263677475fee", - "date" : "Tue, 7 Apr 2026 05:41:17 GMT", - "server" : "databricks", - "x-databricks-popp-response-code-details" : "via_upstream", - "x-databricks-shard-debug" : "oregon-staging", - "vary" : 
"Accept-Encoding", - "x-databricks-popp-fast-path-routing-reason" : "not_eligible", - "x-databricks-popp-shadow-routing-reason" : "spog-domain-checker-false", - "x-databricks-upstream-cluster" : "oregon-staging-h2", - "x-databricks-org-id" : "6051921418418893", - "strict-transport-security" : "max-age=31536000; includeSubDomains; preload", - "x-content-type-options" : "nosniff", - "x-databricks-popp-routing-reason" : "deployment-name", - "content-type" : "application/json; charset=UTF-8", - "server-timing" : "request_id;dur=0;desc=\"ff05cc42-2265-4b0e-a74a-263677475fee\", client_protocol;dur=0;desc=\"HTTP/1.1\"", - "alt-svc" : "h3=\":443\"; ma=86400, h3-29=\":443\"; ma=86400", - "x-databricks-apiproxy-response-code-details" : "via_upstream" - } - }, - "uuid" : "031f9c97-c01c-43cd-afb9-0bc2f5ca729b", - "insertionIndex" : 35 -} \ No newline at end of file