3
3
import json
4
4
import logging
5
5
import random
6
+ import sys
6
7
import threading
7
8
import time
8
9
import types
10
+ import warnings
9
11
from collections .abc import Callable , Iterator
10
12
from datetime import timedelta
11
13
from typing import TYPE_CHECKING , Any
@@ -70,28 +72,39 @@ def __new__(cls, *args, **kwargs):
70
72
return tuple .__new__ (cls , args )
71
73
72
74
@classmethod
def factory(cls, col_names: list[str]) -> type["Row"]:
    """Create a new Row class with the given column names."""
    # Build the subclass namespace explicitly; every instance of the
    # returned class shares this column-name list.
    namespace = {"__columns__": col_names}
    return type("Row", (Row,), namespace)
76
78
77
# If we can mark the method as deprecated via PEP 702 annotation, prefer this because it helps mypy and
# PyCharm/IntelliJ detect and flag deprecated use.
if sys.version_info >= (3, 13):

    @warnings.deprecated("Using as_dict() on rows is deprecated; use asDict() instead.")  # pylint: disable=no-member
    def as_dict(self) -> dict[str, Any]:
        """Convert the row to a dictionary with the same conventions as Databricks SDK."""
        return self.asDict()

else:

    def as_dict(self) -> dict[str, Any]:
        """Convert the row to a dictionary with the same conventions as Databricks SDK."""
        # stacklevel=2 attributes the warning to the *caller* of as_dict(),
        # not to this line, matching what the PEP 702 decorator above does.
        warnings.warn(
            "Using as_dict() on rows is deprecated; use asDict() instead.",
            DeprecationWarning,
            stacklevel=2,
        )
        return self.asDict()
80
93
81
94
# PySpark's compatibility
82
95
def asDict (self , recursive : bool = False ) -> dict [str , Any ]:
83
96
_ = recursive
84
- return self .as_dict ( )
97
+ return dict ( zip ( self .__columns__ , self , strict = True ) )
85
98
86
def __eq__(self, other) -> bool:
    """Check if the rows are equal."""
    # Compare as dictionaries rather than positionally: the order of
    # fields passed to the constructor is not guaranteed. Non-Row
    # operands are simply unequal.
    return isinstance(other, Row) and self.asDict() == other.asDict()
93
106
94
def __contains__(self, item) -> bool:
    """Check whether *item* is one of this row's column names."""
    # Membership is defined over column names, not cell values.
    column_names = self.__columns__
    return item in column_names
97
110
@@ -114,7 +127,7 @@ def __getattr__(self, col):
114
127
except ValueError :
115
128
raise AttributeError (col ) from None
116
129
117
def __repr__(self) -> str:
    """Get the string representation of the row."""
    # Render as Row(name=value, ...) with repr() applied to each value.
    fields = ", ".join(
        f"{name}={value!r}" for name, value in zip(self.__columns__, self, strict=True)
    )
    return f"Row({fields})"
120
133
@@ -311,7 +324,7 @@ def fetch_all(
311
324
>>> pickup_time, dropoff_time = row[0], row[1]
312
325
>>> pickup_zip = row.pickup_zip
313
326
>>> dropoff_zip = row["dropoff_zip"]
314
- >>> all_fields = row.as_dict ()
327
+ >>> all_fields = row.asDict ()
315
328
>>> logger.info(f"{pickup_zip}@{pickup_time} -> {dropoff_zip}@{dropoff_time}: {all_fields}")
316
329
317
330
:param statement: str
@@ -366,7 +379,7 @@ def fetch_one(self, statement: str, disable_magic: bool = False, **kwargs) -> Ro
366
379
>>> pickup_time, dropoff_time = row[0], row[1]
367
380
>>> pickup_zip = row.pickup_zip
368
381
>>> dropoff_zip = row['dropoff_zip']
369
- >>> all_fields = row.as_dict ()
382
+ >>> all_fields = row.asDict ()
370
383
>>> print(f'{pickup_zip}@{pickup_time} -> {dropoff_zip}@{dropoff_time}: {all_fields}')
371
384
372
385
:param statement: str
0 commit comments