Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Original file line number Diff line number Diff line change
Expand Up @@ -1132,9 +1132,18 @@ public ResultSet getCrossReference(
foreignTable));

throwExceptionIfConnectionIsClosed();
if (parentTable == null && foreignTable == null) {
// Thrift requires parentTable — null or empty parentTable is invalid
if (parentTable == null || parentTable.isEmpty()) {
LOGGER.debug("getCrossReference: parentTable is null or empty, throwing");
throw new DatabricksSQLException(
"Invalid argument: foreignTable and parentTableName are both null",
"Invalid argument: parentTable may not be null or empty",
DatabricksDriverErrorCode.INVALID_STATE);
}
// Empty foreign table is also invalid — Thrift server rejects it
if (foreignTable != null && foreignTable.isEmpty()) {
LOGGER.debug("getCrossReference: foreignTable is empty string, throwing");
throw new DatabricksSQLException(
"Invalid argument: foreignTable may not be empty",
DatabricksDriverErrorCode.INVALID_STATE);
}

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -14,8 +14,10 @@
import com.databricks.jdbc.dbclient.IDatabricksMetadataClient;
import com.databricks.jdbc.dbclient.impl.common.CommandConstants;
import com.databricks.jdbc.dbclient.impl.common.MetadataResultSetBuilder;
import com.databricks.jdbc.exception.DatabricksSQLException;
import com.databricks.jdbc.log.JdbcLogger;
import com.databricks.jdbc.log.JdbcLoggerFactory;
import com.databricks.jdbc.model.telemetry.enums.DatabricksDriverErrorCode;
import java.sql.ResultSet;
import java.sql.ResultSetMetaData;
import java.sql.SQLException;
Expand Down Expand Up @@ -324,19 +326,6 @@ public DatabricksResultSet listPrimaryKeys(
catalog = autoFillCatalog(catalog, currentCatalog);

String[] resolvedParams = resolveKeyBasedParams(catalog, schema, table, session);
if (resolvedParams == null) {
LOGGER.debug(
"Could not resolve key-based params (catalog={}, schema={}, table={}), returning empty result set for listPrimaryKeys",
catalog,
schema,
table);
return metadataResultSetBuilder.getResultSetWithGivenRowsAndColumns(
PRIMARY_KEYS_COLUMNS,
new ArrayList<>(),
METADATA_STATEMENT_ID,
com.databricks.jdbc.common.CommandName.LIST_PRIMARY_KEYS);
}

String resolvedCatalog = resolvedParams[0];
String resolvedSchema = resolvedParams[1];
String resolvedTable = resolvedParams[2];
Expand Down Expand Up @@ -373,19 +362,6 @@ public DatabricksResultSet listImportedKeys(
catalog = autoFillCatalog(catalog, currentCatalog);

String[] resolvedParams = resolveKeyBasedParams(catalog, schema, table, session);
if (resolvedParams == null) {
LOGGER.debug(
"Could not resolve key-based params (catalog={}, schema={}, table={}), returning empty result set for listImportedKeys",
catalog,
schema,
table);
return metadataResultSetBuilder.getResultSetWithGivenRowsAndColumns(
IMPORTED_KEYS_COLUMNS,
new ArrayList<>(),
METADATA_STATEMENT_ID,
com.databricks.jdbc.common.CommandName.GET_IMPORTED_KEYS);
}

String resolvedCatalog = resolvedParams[0];
String resolvedSchema = resolvedParams[1];
String resolvedTable = resolvedParams[2];
Expand Down Expand Up @@ -414,6 +390,12 @@ public DatabricksResultSet listExportedKeys(
IDatabricksSession session, String catalog, String schema, String table) throws SQLException {
LOGGER.debug("public ResultSet listExportedKeys() using SDK");

if (table == null) {
LOGGER.debug("listExportedKeys: table is null, throwing");
throw new DatabricksSQLException(
"Invalid argument: tableName may not be null", DatabricksDriverErrorCode.INVALID_STATE);
}

// Only fetch currentCatalog if multiple catalog support is disabled
String currentCatalog = isMultipleCatalogSupportDisabled() ? session.getCurrentCatalog() : null;
if (!metadataResultSetBuilder.shouldAllowCatalogAccess(catalog, currentCatalog, session)) {
Expand All @@ -438,6 +420,12 @@ public DatabricksResultSet listCrossReferences(
throws SQLException {
LOGGER.debug("public ResultSet listCrossReferences() using SDK");

// Null foreignTable means "unspecified" — Thrift server returns empty ResultSet
if (foreignTable == null) {
LOGGER.debug("listCrossReferences: foreignTable is null, returning empty result set");
return metadataResultSetBuilder.getCrossRefsResult(new ArrayList<>());
}

// Only fetch currentCatalog if multiple catalog support is disabled
String currentCatalog = isMultipleCatalogSupportDisabled() ? session.getCurrentCatalog() : null;
if (!metadataResultSetBuilder.shouldAllowCatalogAccess(parentCatalog, currentCatalog, session)
Expand All @@ -446,29 +434,12 @@ public DatabricksResultSet listCrossReferences(
return metadataResultSetBuilder.getCrossRefsResult(new ArrayList<>());
}

// Resolve null params for the foreign side (used to build the SQL query)
// Resolve null catalog/schema for the foreign side (used to build the SQL query)
String[] resolvedForeignParams =
resolveKeyBasedParams(foreignCatalog, foreignSchema, foreignTable, session);
if (resolvedForeignParams == null) {
LOGGER.debug(
"Could not resolve foreign key-based params (catalog={}, schema={}, table={}), returning empty result set",
foreignCatalog,
foreignSchema,
foreignTable);
return metadataResultSetBuilder.getCrossRefsResult(new ArrayList<>());
}

// Resolve null params for the parent side (used for filtering results)
// Resolve null catalog/schema for the parent side (used for filtering results)
String[] resolvedParentParams =
resolveKeyBasedParams(parentCatalog, parentSchema, parentTable, session);
if (resolvedParentParams == null) {
LOGGER.debug(
"Could not resolve parent key-based params (catalog={}, schema={}, table={}), returning empty result set",
parentCatalog,
parentSchema,
parentTable);
return metadataResultSetBuilder.getCrossRefsResult(new ArrayList<>());
}

String resolvedForeignCatalog = resolvedForeignParams[0];
String resolvedForeignSchema = resolvedForeignParams[1];
Expand Down Expand Up @@ -542,15 +513,21 @@ private String autoFillCatalog(String catalog, String currentCatalog) {
}

/**
* Resolves null catalog/schema for key-based metadata operations to match Thrift server behavior.
* When catalog is null, it is replaced with current_catalog and (if schema is also null) schema
* is replaced with current_schema. Returns null if the caller should return an empty result set
* (table is null, schema is null without catalog also being null, or any resolved value is null).
* Validates and resolves null catalog/schema/table for key-based metadata operations to match
* Thrift server behavior. Throws DatabricksSQLException for invalid parameter combinations
* (matching Thrift error behavior). When catalog is null, it is replaced with current_catalog and
* (if schema is also null) schema is replaced with current_schema.
*
* @throws DatabricksSQLException if table is null/empty, or schema is null with an explicit
* catalog
*/
private String[] resolveKeyBasedParams(
String catalog, String schema, String table, IDatabricksSession session) throws SQLException {
if (table == null) {
return null;
if (table == null || table.isEmpty()) {
LOGGER.debug("resolveKeyBasedParams: table is null or empty, throwing");
throw new DatabricksSQLException(
"Invalid argument: tableName may not be null or empty",
DatabricksDriverErrorCode.INVALID_STATE);
}

if (catalog == null) {
Expand All @@ -560,11 +537,22 @@ private String[] resolveKeyBasedParams(
schema = currentCatalogAndSchema[1];
}
} else if (schema == null) {
return null;
LOGGER.debug(
"resolveKeyBasedParams: schema is null with explicit catalog '{}', throwing", catalog);
throw new DatabricksSQLException(
"Invalid argument: schema may not be null when catalog is specified",
DatabricksDriverErrorCode.INVALID_STATE);
}

// Safety net: getCurrentCatalogAndSchema() returned null values
if (catalog == null || schema == null) {
return null;
LOGGER.debug(
"resolveKeyBasedParams: could not resolve catalog or schema (catalog={}, schema={})",
catalog,
schema);
throw new DatabricksSQLException(
Copy link
Copy Markdown
Collaborator

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Why this exception? This will throw when catalog is null but schema is valid.

Copy link
Copy Markdown
Collaborator Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

This is a safety net for when getCurrentCatalogAndSchema() returns null values (server error edge case). In normal flow: if catalog was null it gets resolved at line 533, and if schema was also null it gets resolved at line 536 — so both are non-null by this point. This check only triggers if the server returns null for current_catalog or current_schema. Added a comment and debug log with the actual values for clarity.

"Invalid argument: could not resolve catalog or schema",
DatabricksDriverErrorCode.INVALID_STATE);
}

return new String[] {catalog, schema, table};
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -759,9 +759,8 @@ void testListCrossReferences() throws Exception {
}

/**
* Tests that getCrossReference returns empty result set (not an exception) when all three
* foreign-side parameters are null. Matches Thrift server behavior where null foreign table
* delegates to getExportedKeys which returns empty in DBSQL.
* Tests that getCrossReference returns empty result set (not an exception) when foreign table is
* null. Matches Thrift server behavior where null table means "unspecified" and returns empty.
*/
@Test
void testListCrossReferences_allForeignParamsNull_returnsEmpty() throws Exception {
Expand All @@ -770,7 +769,22 @@ void testListCrossReferences_allForeignParamsNull_returnsEmpty() throws Exceptio
DatabricksResultSet result =
metadataClient.listCrossReferences(
session, TEST_CATALOG, TEST_SCHEMA, TEST_TABLE, null, null, null);
assertFalse(result.next(), "Should return empty when all foreign params are null, not throw");
assertFalse(result.next(), "Should return empty when foreign table is null");
}

/**
 * Tests that getCrossReference returns an empty result set (not an exception) when every
 * parameter — including both parentTable and foreignTable — is null. At this layer a null
 * foreignTable triggers the early "unspecified foreign table" return with an empty result;
 * the stricter requires-parentTable validation lives in the DatabricksDatabaseMetaData layer,
 * so it is not exercised here.
 */
@Test
void testListCrossReferences_bothTablesNull_returnsEmpty() throws Exception {
DatabricksMetadataQueryClient metadataClient = new DatabricksMetadataQueryClient(mockClient);

// All seven arguments null: catalog/schema/table for both parent and foreign sides.
DatabricksResultSet result =
metadataClient.listCrossReferences(session, null, null, null, null, null, null);
assertFalse(result.next(), "Should return empty when both tables are null");
}

@Test
Expand Down Expand Up @@ -943,40 +957,58 @@ void testListFunctionsWithNullCatalog() throws SQLException {
}

@Test
void testKeyBasedOpsReturnEmptyForNullTable() throws SQLException {
void testKeyBasedOpsThrowForNullTable() {
DatabricksMetadataQueryClient metadataClient = new DatabricksMetadataQueryClient(mockClient);

// null table should return empty for listPrimaryKeys
DatabricksResultSet pkResult =
metadataClient.listPrimaryKeys(session, TEST_CATALOG, TEST_SCHEMA, null);
assertNotNull(pkResult);
assertFalse(pkResult.next(), "Expected empty result set for listPrimaryKeys with null table");

// null table should return empty for listImportedKeys
DatabricksResultSet ikResult =
metadataClient.listImportedKeys(session, TEST_CATALOG, TEST_SCHEMA, null);
assertNotNull(ikResult);
assertFalse(ikResult.next(), "Expected empty result set for listImportedKeys with null table");
assertThrows(
DatabricksSQLException.class,
() -> metadataClient.listPrimaryKeys(session, TEST_CATALOG, TEST_SCHEMA, null),
"listPrimaryKeys should throw for null table");

assertThrows(
DatabricksSQLException.class,
() -> metadataClient.listImportedKeys(session, TEST_CATALOG, TEST_SCHEMA, null),
"listImportedKeys should throw for null table");
}

@Test
void testKeyBasedOpsReturnEmptyForNullSchemaWithExplicitCatalog() throws SQLException {
void testKeyBasedOpsThrowForEmptyTable() {
DatabricksMetadataQueryClient metadataClient = new DatabricksMetadataQueryClient(mockClient);

// schema=null with explicit catalog should return empty (matching Thrift behavior)
DatabricksResultSet pkResult =
metadataClient.listPrimaryKeys(session, "any_catalog", null, TEST_TABLE);
assertNotNull(pkResult);
assertFalse(
pkResult.next(),
"Expected empty result set for listPrimaryKeys with null schema and explicit catalog");

DatabricksResultSet ikResult =
metadataClient.listImportedKeys(session, "any_catalog", null, TEST_TABLE);
assertNotNull(ikResult);
assertFalse(
ikResult.next(),
"Expected empty result set for listImportedKeys with null schema and explicit catalog");
assertThrows(
DatabricksSQLException.class,
() -> metadataClient.listPrimaryKeys(session, TEST_CATALOG, TEST_SCHEMA, ""),
"listPrimaryKeys should throw for empty table");

assertThrows(
DatabricksSQLException.class,
() -> metadataClient.listImportedKeys(session, TEST_CATALOG, TEST_SCHEMA, ""),
"listImportedKeys should throw for empty table");
}

/** An explicitly specified catalog combined with a null schema must be rejected. */
@Test
void testKeyBasedOpsThrowForNullSchemaWithExplicitCatalog() {
DatabricksMetadataQueryClient client = new DatabricksMetadataQueryClient(mockClient);

// Exercise the same invalid catalog/schema combination against both key-based lookups.
assertThrows(
    DatabricksSQLException.class,
    () -> client.listPrimaryKeys(session, "any_catalog", null, TEST_TABLE),
    "listPrimaryKeys should throw for null schema with explicit catalog");
assertThrows(
    DatabricksSQLException.class,
    () -> client.listImportedKeys(session, "any_catalog", null, TEST_TABLE),
    "listImportedKeys should throw for null schema with explicit catalog");
}

/** A null table name is invalid input for listExportedKeys and must surface as an exception. */
@Test
void testExportedKeysThrowsForNullTable() {
DatabricksMetadataQueryClient client = new DatabricksMetadataQueryClient(mockClient);
assertThrows(
    DatabricksSQLException.class,
    () -> client.listExportedKeys(session, TEST_CATALOG, TEST_SCHEMA, null),
    "listExportedKeys should throw for null table");
}

@Test
Expand Down
Loading
Loading