Skip to content

Commit 4d10dcc

Browse files
fix type issues
Signed-off-by: varun-edachali-dbx <varun.edachali@databricks.com>
1 parent 7d21ad1 commit 4d10dcc

File tree

3 files changed

+9
-7
lines changed

3 files changed

+9
-7
lines changed

src/databricks/sql/backend/sea/backend.py

Lines changed: 8 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -511,6 +511,8 @@ def cancel_command(self, command_id: CommandId) -> None:
511511
raise ProgrammingError("Not a valid SEA command ID")
512512

513513
sea_statement_id = command_id.to_sea_statement_id()
514+
if sea_statement_id is None:
515+
raise ProgrammingError("Not a valid SEA command ID")
514516

515517
request = CancelStatementRequest(statement_id=sea_statement_id)
516518
self.http_client._make_request(
@@ -534,6 +536,8 @@ def close_command(self, command_id: CommandId) -> None:
534536
raise ProgrammingError("Not a valid SEA command ID")
535537

536538
sea_statement_id = command_id.to_sea_statement_id()
539+
if sea_statement_id is None:
540+
raise ProgrammingError("Not a valid SEA command ID")
537541

538542
request = CloseStatementRequest(statement_id=sea_statement_id)
539543
self.http_client._make_request(
@@ -560,6 +564,8 @@ def get_query_state(self, command_id: CommandId) -> CommandState:
560564
raise ValueError("Not a valid SEA command ID")
561565

562566
sea_statement_id = command_id.to_sea_statement_id()
567+
if sea_statement_id is None:
568+
raise ProgrammingError("Not a valid SEA command ID")
563569

564570
request = GetStatementRequest(statement_id=sea_statement_id)
565571
response_data = self.http_client._make_request(
@@ -595,6 +601,8 @@ def get_execution_result(
595601
raise ProgrammingError("Not a valid SEA command ID")
596602

597603
sea_statement_id = command_id.to_sea_statement_id()
604+
if sea_statement_id is None:
605+
raise ProgrammingError("Not a valid SEA command ID")
598606

599607
# Create the request model
600608
request = GetStatementRequest(statement_id=sea_statement_id)

src/databricks/sql/result_set.py

Lines changed: 0 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -522,9 +522,6 @@ def _convert_json_types(self, rows: List[List]) -> List[List]:
522522
Convert string values to appropriate Python types based on column metadata.
523523
"""
524524

525-
if not self.description or not rows:
526-
return rows
527-
528525
# JSON + INLINE gives us string values, so we convert them to appropriate
529526
# types based on column metadata
530527
converted_rows = []
@@ -562,9 +559,6 @@ def _create_json_table(self, rows: List[List]) -> List[Row]:
562559
List of Row objects with named columns and converted values
563560
"""
564561

565-
if not self.description or not rows:
566-
return rows
567-
568562
ResultRow = Row(*[col[0] for col in self.description])
569563
rows = self._convert_json_types(rows)
570564

src/databricks/sql/utils.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -274,7 +274,7 @@ def __init__(
274274
start_row_offset: int = 0,
275275
result_links: Optional[List[TSparkArrowResultLink]] = None,
276276
lz4_compressed: bool = True,
277-
description: List[Tuple] = [],
277+
description: Optional[List[Tuple]] = None,
278278
):
279279
"""
280280
A queue-like wrapper over CloudFetch arrow batches.

0 commit comments

Comments (0)