-
Notifications
You must be signed in to change notification settings - Fork 39
SEA metadata: throw for invalid null/empty params (Thrift parity) #1390
New issue
Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.
By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.
Already on GitHub? Sign in to your account
Changes from 1 commit
e46612a
c39197e
22e416a
94c6e15
7cf3826
4736e83
188966e
eb0a33c
b3e4199
File filter
Filter by extension
Conversations
Jump to
Diff view
Diff view
There are no files selected for viewing
| Original file line number | Diff line number | Diff line change |
|---|---|---|
|
|
@@ -14,8 +14,10 @@ | |
| import com.databricks.jdbc.dbclient.IDatabricksMetadataClient; | ||
| import com.databricks.jdbc.dbclient.impl.common.CommandConstants; | ||
| import com.databricks.jdbc.dbclient.impl.common.MetadataResultSetBuilder; | ||
| import com.databricks.jdbc.exception.DatabricksSQLException; | ||
| import com.databricks.jdbc.log.JdbcLogger; | ||
| import com.databricks.jdbc.log.JdbcLoggerFactory; | ||
| import com.databricks.jdbc.model.telemetry.enums.DatabricksDriverErrorCode; | ||
| import java.sql.ResultSet; | ||
| import java.sql.ResultSetMetaData; | ||
| import java.sql.SQLException; | ||
|
|
@@ -324,19 +326,6 @@ public DatabricksResultSet listPrimaryKeys( | |
| catalog = autoFillCatalog(catalog, currentCatalog); | ||
|
|
||
| String[] resolvedParams = resolveKeyBasedParams(catalog, schema, table, session); | ||
| if (resolvedParams == null) { | ||
| LOGGER.debug( | ||
| "Could not resolve key-based params (catalog={}, schema={}, table={}), returning empty result set for listPrimaryKeys", | ||
| catalog, | ||
| schema, | ||
| table); | ||
| return metadataResultSetBuilder.getResultSetWithGivenRowsAndColumns( | ||
| PRIMARY_KEYS_COLUMNS, | ||
| new ArrayList<>(), | ||
| METADATA_STATEMENT_ID, | ||
| com.databricks.jdbc.common.CommandName.LIST_PRIMARY_KEYS); | ||
| } | ||
|
|
||
| String resolvedCatalog = resolvedParams[0]; | ||
| String resolvedSchema = resolvedParams[1]; | ||
| String resolvedTable = resolvedParams[2]; | ||
|
|
@@ -373,19 +362,6 @@ public DatabricksResultSet listImportedKeys( | |
| catalog = autoFillCatalog(catalog, currentCatalog); | ||
|
|
||
| String[] resolvedParams = resolveKeyBasedParams(catalog, schema, table, session); | ||
| if (resolvedParams == null) { | ||
| LOGGER.debug( | ||
| "Could not resolve key-based params (catalog={}, schema={}, table={}), returning empty result set for listImportedKeys", | ||
| catalog, | ||
| schema, | ||
| table); | ||
| return metadataResultSetBuilder.getResultSetWithGivenRowsAndColumns( | ||
| IMPORTED_KEYS_COLUMNS, | ||
| new ArrayList<>(), | ||
| METADATA_STATEMENT_ID, | ||
| com.databricks.jdbc.common.CommandName.GET_IMPORTED_KEYS); | ||
| } | ||
|
|
||
| String resolvedCatalog = resolvedParams[0]; | ||
| String resolvedSchema = resolvedParams[1]; | ||
| String resolvedTable = resolvedParams[2]; | ||
|
|
@@ -414,6 +390,12 @@ public DatabricksResultSet listExportedKeys( | |
| IDatabricksSession session, String catalog, String schema, String table) throws SQLException { | ||
| LOGGER.debug("public ResultSet listExportedKeys() using SDK"); | ||
|
|
||
| if (table == null || table.isEmpty()) { | ||
|
||
| throw new DatabricksSQLException( | ||
| "Invalid argument: tableName may not be null or empty", | ||
| DatabricksDriverErrorCode.INVALID_STATE); | ||
| } | ||
|
|
||
| // Only fetch currentCatalog if multiple catalog support is disabled | ||
| String currentCatalog = isMultipleCatalogSupportDisabled() ? session.getCurrentCatalog() : null; | ||
| if (!metadataResultSetBuilder.shouldAllowCatalogAccess(catalog, currentCatalog, session)) { | ||
|
|
@@ -446,29 +428,12 @@ public DatabricksResultSet listCrossReferences( | |
| return metadataResultSetBuilder.getCrossRefsResult(new ArrayList<>()); | ||
| } | ||
|
|
||
| // Resolve null params for the foreign side (used to build the SQL query) | ||
| // Resolve null/empty params for the foreign side (used to build the SQL query) | ||
| String[] resolvedForeignParams = | ||
| resolveKeyBasedParams(foreignCatalog, foreignSchema, foreignTable, session); | ||
| if (resolvedForeignParams == null) { | ||
| LOGGER.debug( | ||
| "Could not resolve foreign key-based params (catalog={}, schema={}, table={}), returning empty result set", | ||
| foreignCatalog, | ||
| foreignSchema, | ||
| foreignTable); | ||
| return metadataResultSetBuilder.getCrossRefsResult(new ArrayList<>()); | ||
| } | ||
|
|
||
| // Resolve null params for the parent side (used for filtering results) | ||
| // Resolve null/empty params for the parent side (used for filtering results) | ||
| String[] resolvedParentParams = | ||
| resolveKeyBasedParams(parentCatalog, parentSchema, parentTable, session); | ||
| if (resolvedParentParams == null) { | ||
| LOGGER.debug( | ||
| "Could not resolve parent key-based params (catalog={}, schema={}, table={}), returning empty result set", | ||
| parentCatalog, | ||
| parentSchema, | ||
| parentTable); | ||
| return metadataResultSetBuilder.getCrossRefsResult(new ArrayList<>()); | ||
| } | ||
|
|
||
| String resolvedForeignCatalog = resolvedForeignParams[0]; | ||
| String resolvedForeignSchema = resolvedForeignParams[1]; | ||
|
|
@@ -542,15 +507,20 @@ private String autoFillCatalog(String catalog, String currentCatalog) { | |
| } | ||
|
|
||
| /** | ||
| * Resolves null catalog/schema for key-based metadata operations to match Thrift server behavior. | ||
| * When catalog is null, it is replaced with current_catalog and (if schema is also null) schema | ||
| * is replaced with current_schema. Returns null if the caller should return an empty result set | ||
| * (table is null, schema is null without catalog also being null, or any resolved value is null). | ||
| * Validates and resolves null/empty catalog/schema/table for key-based metadata operations to | ||
| * match Thrift server behavior. Throws DatabricksSQLException for invalid parameter combinations | ||
| * (matching Thrift error behavior). When catalog is null, it is replaced with current_catalog and | ||
| * (if schema is also null) schema is replaced with current_schema. | ||
| * | ||
| * @throws DatabricksSQLException if table is null/empty, or schema is null/empty with an explicit | ||
| * catalog | ||
| */ | ||
| private String[] resolveKeyBasedParams( | ||
| String catalog, String schema, String table, IDatabricksSession session) throws SQLException { | ||
| if (table == null) { | ||
| return null; | ||
| if (table == null || table.isEmpty()) { | ||
|
Collaborator
There was a problem hiding this comment. Choose a reason for hiding this comment. The reason will be displayed to describe this comment to others. Learn more. logging
Collaborator
Author
There was a problem hiding this comment. Choose a reason for hiding this comment. The reason will be displayed to describe this comment to others. Learn more. Done — added LOGGER.debug() with the table value before throwing. |
||
| throw new DatabricksSQLException( | ||
| "Invalid argument: tableName may not be null or empty", | ||
| DatabricksDriverErrorCode.INVALID_STATE); | ||
| } | ||
|
|
||
| if (catalog == null) { | ||
|
|
@@ -559,12 +529,16 @@ private String[] resolveKeyBasedParams( | |
| if (schema == null) { | ||
| schema = currentCatalogAndSchema[1]; | ||
| } | ||
| } else if (schema == null) { | ||
| return null; | ||
| } else if (schema == null || schema.isEmpty()) { | ||
|
||
| throw new DatabricksSQLException( | ||
| "Invalid argument: schema may not be null or empty when catalog is specified", | ||
| DatabricksDriverErrorCode.INVALID_STATE); | ||
| } | ||
|
|
||
| if (catalog == null || schema == null) { | ||
| return null; | ||
| throw new DatabricksSQLException( | ||
|
Collaborator
There was a problem hiding this comment. Choose a reason for hiding this comment. The reason will be displayed to describe this comment to others. Learn more. Why this exception? This will throw when catalog = null but schema is valid.
Collaborator
Author
There was a problem hiding this comment. Choose a reason for hiding this comment. The reason will be displayed to describe this comment to others. Learn more. This is a safety net for when getCurrentCatalogAndSchema() returns null values (server error edge case). In normal flow: if catalog was null it gets resolved at line 533, and if schema was also null it gets resolved at line 536 — so both are non-null by this point. This check only triggers if the server returns null for current_catalog or current_schema. Added a comment and debug log with the actual values for clarity. |
||
| "Invalid argument: could not resolve catalog or schema", | ||
| DatabricksDriverErrorCode.INVALID_STATE); | ||
| } | ||
|
|
||
| return new String[] {catalog, schema, table}; | ||
|
|
||
There was a problem hiding this comment.
Choose a reason for hiding this comment.
The reason will be displayed to describe this comment to others. Learn more.
add logging
There was a problem hiding this comment.
Choose a reason for hiding this comment
The reason will be displayed to describe this comment to others. Learn more.
Done — added LOGGER.debug() before all throw sites in the latest commits.