Skip to content
Merged
Original file line number Diff line number Diff line change
Expand Up @@ -1132,9 +1132,11 @@ public ResultSet getCrossReference(
foreignTable));

throwExceptionIfConnectionIsClosed();
if (parentTable == null && foreignTable == null) {
boolean parentTableMissing = parentTable == null || parentTable.isEmpty();
boolean foreignTableMissing = foreignTable == null || foreignTable.isEmpty();
if (parentTableMissing && foreignTableMissing) {
Copy link
Copy Markdown
Collaborator

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

add logging

Copy link
Copy Markdown
Collaborator Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Done — added LOGGER.debug() before all throw sites in the latest commits.

throw new DatabricksSQLException(
"Invalid argument: foreignTable and parentTableName are both null",
"Invalid argument: foreignTable and parentTableName are both null or empty",
DatabricksDriverErrorCode.INVALID_STATE);
}

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -14,8 +14,10 @@
import com.databricks.jdbc.dbclient.IDatabricksMetadataClient;
import com.databricks.jdbc.dbclient.impl.common.CommandConstants;
import com.databricks.jdbc.dbclient.impl.common.MetadataResultSetBuilder;
import com.databricks.jdbc.exception.DatabricksSQLException;
import com.databricks.jdbc.log.JdbcLogger;
import com.databricks.jdbc.log.JdbcLoggerFactory;
import com.databricks.jdbc.model.telemetry.enums.DatabricksDriverErrorCode;
import java.sql.ResultSet;
import java.sql.ResultSetMetaData;
import java.sql.SQLException;
Expand Down Expand Up @@ -324,19 +326,6 @@ public DatabricksResultSet listPrimaryKeys(
catalog = autoFillCatalog(catalog, currentCatalog);

String[] resolvedParams = resolveKeyBasedParams(catalog, schema, table, session);
if (resolvedParams == null) {
LOGGER.debug(
"Could not resolve key-based params (catalog={}, schema={}, table={}), returning empty result set for listPrimaryKeys",
catalog,
schema,
table);
return metadataResultSetBuilder.getResultSetWithGivenRowsAndColumns(
PRIMARY_KEYS_COLUMNS,
new ArrayList<>(),
METADATA_STATEMENT_ID,
com.databricks.jdbc.common.CommandName.LIST_PRIMARY_KEYS);
}

String resolvedCatalog = resolvedParams[0];
String resolvedSchema = resolvedParams[1];
String resolvedTable = resolvedParams[2];
Expand Down Expand Up @@ -373,19 +362,6 @@ public DatabricksResultSet listImportedKeys(
catalog = autoFillCatalog(catalog, currentCatalog);

String[] resolvedParams = resolveKeyBasedParams(catalog, schema, table, session);
if (resolvedParams == null) {
LOGGER.debug(
"Could not resolve key-based params (catalog={}, schema={}, table={}), returning empty result set for listImportedKeys",
catalog,
schema,
table);
return metadataResultSetBuilder.getResultSetWithGivenRowsAndColumns(
IMPORTED_KEYS_COLUMNS,
new ArrayList<>(),
METADATA_STATEMENT_ID,
com.databricks.jdbc.common.CommandName.GET_IMPORTED_KEYS);
}

String resolvedCatalog = resolvedParams[0];
String resolvedSchema = resolvedParams[1];
String resolvedTable = resolvedParams[2];
Expand Down Expand Up @@ -414,6 +390,12 @@ public DatabricksResultSet listExportedKeys(
IDatabricksSession session, String catalog, String schema, String table) throws SQLException {
LOGGER.debug("public ResultSet listExportedKeys() using SDK");

if (table == null || table.isEmpty()) {
Copy link
Copy Markdown
Collaborator

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

add logging

Copy link
Copy Markdown
Collaborator Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Done — added LOGGER.debug() before the throw.

throw new DatabricksSQLException(
"Invalid argument: tableName may not be null or empty",
DatabricksDriverErrorCode.INVALID_STATE);
}

// Only fetch currentCatalog if multiple catalog support is disabled
String currentCatalog = isMultipleCatalogSupportDisabled() ? session.getCurrentCatalog() : null;
if (!metadataResultSetBuilder.shouldAllowCatalogAccess(catalog, currentCatalog, session)) {
Expand Down Expand Up @@ -446,29 +428,12 @@ public DatabricksResultSet listCrossReferences(
return metadataResultSetBuilder.getCrossRefsResult(new ArrayList<>());
}

// Resolve null params for the foreign side (used to build the SQL query)
// Resolve null/empty params for the foreign side (used to build the SQL query)
String[] resolvedForeignParams =
resolveKeyBasedParams(foreignCatalog, foreignSchema, foreignTable, session);
if (resolvedForeignParams == null) {
LOGGER.debug(
"Could not resolve foreign key-based params (catalog={}, schema={}, table={}), returning empty result set",
foreignCatalog,
foreignSchema,
foreignTable);
return metadataResultSetBuilder.getCrossRefsResult(new ArrayList<>());
}

// Resolve null params for the parent side (used for filtering results)
// Resolve null/empty params for the parent side (used for filtering results)
String[] resolvedParentParams =
resolveKeyBasedParams(parentCatalog, parentSchema, parentTable, session);
if (resolvedParentParams == null) {
LOGGER.debug(
"Could not resolve parent key-based params (catalog={}, schema={}, table={}), returning empty result set",
parentCatalog,
parentSchema,
parentTable);
return metadataResultSetBuilder.getCrossRefsResult(new ArrayList<>());
}

String resolvedForeignCatalog = resolvedForeignParams[0];
String resolvedForeignSchema = resolvedForeignParams[1];
Expand Down Expand Up @@ -542,15 +507,20 @@ private String autoFillCatalog(String catalog, String currentCatalog) {
}

/**
* Resolves null catalog/schema for key-based metadata operations to match Thrift server behavior.
* When catalog is null, it is replaced with current_catalog and (if schema is also null) schema
* is replaced with current_schema. Returns null if the caller should return an empty result set
* (table is null, schema is null without catalog also being null, or any resolved value is null).
* Validates and resolves null/empty catalog/schema/table for key-based metadata operations to
* match Thrift server behavior. Throws DatabricksSQLException for invalid parameter combinations
* (matching Thrift error behavior). When catalog is null, it is replaced with current_catalog and
* (if schema is also null) schema is replaced with current_schema.
*
* @throws DatabricksSQLException if table is null/empty, or schema is null/empty with an explicit
* catalog
*/
private String[] resolveKeyBasedParams(
String catalog, String schema, String table, IDatabricksSession session) throws SQLException {
if (table == null) {
return null;
if (table == null || table.isEmpty()) {
Copy link
Copy Markdown
Collaborator

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

logging

Copy link
Copy Markdown
Collaborator Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Done — added LOGGER.debug() with the table value before throwing.

throw new DatabricksSQLException(
"Invalid argument: tableName may not be null or empty",
DatabricksDriverErrorCode.INVALID_STATE);
}

if (catalog == null) {
Expand All @@ -559,12 +529,16 @@ private String[] resolveKeyBasedParams(
if (schema == null) {
schema = currentCatalogAndSchema[1];
}
} else if (schema == null) {
return null;
} else if (schema == null || schema.isEmpty()) {
Copy link
Copy Markdown
Collaborator

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

logging

Copy link
Copy Markdown
Collaborator Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Done — added LOGGER.debug() that includes the explicit catalog value for context.

throw new DatabricksSQLException(
"Invalid argument: schema may not be null or empty when catalog is specified",
DatabricksDriverErrorCode.INVALID_STATE);
}

if (catalog == null || schema == null) {
return null;
throw new DatabricksSQLException(
Copy link
Copy Markdown
Collaborator

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

why this exception? this will throw when catalog= null, but schema is valid

Copy link
Copy Markdown
Collaborator Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

This is a safety net for when getCurrentCatalogAndSchema() returns null values (server error edge case). In normal flow: if catalog was null it gets resolved at line 533, and if schema was also null it gets resolved at line 536 — so both are non-null by this point. This check only triggers if the server returns null for current_catalog or current_schema. Added a comment and debug log with the actual values for clarity.

"Invalid argument: could not resolve catalog or schema",
DatabricksDriverErrorCode.INVALID_STATE);
}

return new String[] {catalog, schema, table};
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -764,13 +764,15 @@ void testListCrossReferences() throws Exception {
* delegates to getExportedKeys which returns empty in DBSQL.
*/
@Test
void testListCrossReferences_allForeignParamsNull_returnsEmpty() throws Exception {
void testListCrossReferences_allForeignParamsNull_throws() {
DatabricksMetadataQueryClient metadataClient = new DatabricksMetadataQueryClient(mockClient);

DatabricksResultSet result =
metadataClient.listCrossReferences(
session, TEST_CATALOG, TEST_SCHEMA, TEST_TABLE, null, null, null);
assertFalse(result.next(), "Should return empty when all foreign params are null, not throw");
assertThrows(
DatabricksSQLException.class,
() ->
metadataClient.listCrossReferences(
session, TEST_CATALOG, TEST_SCHEMA, TEST_TABLE, null, null, null),
"Should throw when foreign table is null");
}

@Test
Expand Down Expand Up @@ -943,40 +945,78 @@ void testListFunctionsWithNullCatalog() throws SQLException {
}

@Test
void testKeyBasedOpsReturnEmptyForNullTable() throws SQLException {
void testKeyBasedOpsThrowForNullTable() {
  // Every key-based metadata operation must reject a null table name with a
  // DatabricksSQLException (matching Thrift error behavior) instead of
  // silently returning an empty result set.
  DatabricksMetadataQueryClient client = new DatabricksMetadataQueryClient(mockClient);

  assertThrows(
      DatabricksSQLException.class,
      () -> client.listPrimaryKeys(session, TEST_CATALOG, TEST_SCHEMA, null),
      "listPrimaryKeys should throw for null table");
  assertThrows(
      DatabricksSQLException.class,
      () -> client.listImportedKeys(session, TEST_CATALOG, TEST_SCHEMA, null),
      "listImportedKeys should throw for null table");
}

@Test
void testKeyBasedOpsThrowForEmptyTable() {
  // An empty-string table name is treated the same as null: both
  // listPrimaryKeys and listImportedKeys must throw DatabricksSQLException.
  DatabricksMetadataQueryClient client = new DatabricksMetadataQueryClient(mockClient);

  assertThrows(
      DatabricksSQLException.class,
      () -> client.listPrimaryKeys(session, TEST_CATALOG, TEST_SCHEMA, ""),
      "listPrimaryKeys should throw for empty table");
  assertThrows(
      DatabricksSQLException.class,
      () -> client.listImportedKeys(session, TEST_CATALOG, TEST_SCHEMA, ""),
      "listImportedKeys should throw for empty table");
}

@Test
void testKeyBasedOpsThrowForNullSchemaWithExplicitCatalog() {
DatabricksMetadataQueryClient metadataClient = new DatabricksMetadataQueryClient(mockClient);

// null table should return empty for listPrimaryKeys
DatabricksResultSet pkResult =
metadataClient.listPrimaryKeys(session, TEST_CATALOG, TEST_SCHEMA, null);
assertNotNull(pkResult);
assertFalse(pkResult.next(), "Expected empty result set for listPrimaryKeys with null table");

// null table should return empty for listImportedKeys
DatabricksResultSet ikResult =
metadataClient.listImportedKeys(session, TEST_CATALOG, TEST_SCHEMA, null);
assertNotNull(ikResult);
assertFalse(ikResult.next(), "Expected empty result set for listImportedKeys with null table");
assertThrows(
DatabricksSQLException.class,
() -> metadataClient.listPrimaryKeys(session, "any_catalog", null, TEST_TABLE),
"listPrimaryKeys should throw for null schema with explicit catalog");

assertThrows(
DatabricksSQLException.class,
() -> metadataClient.listImportedKeys(session, "any_catalog", null, TEST_TABLE),
"listImportedKeys should throw for null schema with explicit catalog");
}

@Test
void testKeyBasedOpsReturnEmptyForNullSchemaWithExplicitCatalog() throws SQLException {
void testKeyBasedOpsThrowForEmptySchemaWithExplicitCatalog() {
DatabricksMetadataQueryClient metadataClient = new DatabricksMetadataQueryClient(mockClient);

// schema=null with explicit catalog should return empty (matching Thrift behavior)
DatabricksResultSet pkResult =
metadataClient.listPrimaryKeys(session, "any_catalog", null, TEST_TABLE);
assertNotNull(pkResult);
assertFalse(
pkResult.next(),
"Expected empty result set for listPrimaryKeys with null schema and explicit catalog");

DatabricksResultSet ikResult =
metadataClient.listImportedKeys(session, "any_catalog", null, TEST_TABLE);
assertNotNull(ikResult);
assertFalse(
ikResult.next(),
"Expected empty result set for listImportedKeys with null schema and explicit catalog");
assertThrows(
DatabricksSQLException.class,
() -> metadataClient.listPrimaryKeys(session, "any_catalog", "", TEST_TABLE),
"listPrimaryKeys should throw for empty schema with explicit catalog");

assertThrows(
DatabricksSQLException.class,
() -> metadataClient.listImportedKeys(session, "any_catalog", "", TEST_TABLE),
"listImportedKeys should throw for empty schema with explicit catalog");
}

@Test
void testExportedKeysThrowsForNullTable() {
  // listExportedKeys validates the table argument up front: both null and
  // empty-string table names must raise DatabricksSQLException.
  DatabricksMetadataQueryClient client = new DatabricksMetadataQueryClient(mockClient);

  assertThrows(
      DatabricksSQLException.class,
      () -> client.listExportedKeys(session, TEST_CATALOG, TEST_SCHEMA, null),
      "listExportedKeys should throw for null table");
  assertThrows(
      DatabricksSQLException.class,
      () -> client.listExportedKeys(session, TEST_CATALOG, TEST_SCHEMA, ""),
      "listExportedKeys should throw for empty table");
}

@Test
Expand Down
Loading
Loading