@@ -210,8 +210,8 @@ def _load_previous_if_any(self):
 
   def _is_previous_valid(self, previous):
     return previous.path == self.path and \
-      previous.start_blob_upload_request == self.start_blob_upload_request and \
-      previous.timestamp > time.time() - ResumableFileUpload.RESUMABLE_UPLOAD_EXPIRY_SECONDS
+           previous.start_blob_upload_request == self.start_blob_upload_request and \
+           previous.timestamp > time.time() - ResumableFileUpload.RESUMABLE_UPLOAD_EXPIRY_SECONDS
 
   def upload_initiated(self, start_blob_upload_response):
     if self.context.no_resume:
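The `_is_previous_valid` hunk is indentation-only; the predicate itself says a saved upload may be resumed only if it targets the same path, came from the same start request, and has not expired. A minimal standalone sketch of that check, where the six-hour window, the function name, and the dict shape are illustrative assumptions rather than the library's actual values:

import time

# Assumed placeholder; the real constant lives on ResumableFileUpload.
RESUMABLE_UPLOAD_EXPIRY_SECONDS = 6 * 60 * 60


def is_previous_valid(previous, current):
  # Resume only when the target path and original request match and the
  # saved record is younger than the expiry window.
  return (previous['path'] == current['path'] and
          previous['start_request'] == current['start_request'] and
          previous['timestamp'] > time.time() - RESUMABLE_UPLOAD_EXPIRY_SECONDS)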
@@ -382,11 +382,11 @@ class KaggleApi(KaggleApi):
 
   def _is_retriable(self, e):
     return issubclass(type(e), ConnectionError) or \
-      issubclass(type(e), urllib3_exceptions.ConnectionError) or \
-      issubclass(type(e), urllib3_exceptions.ConnectTimeoutError) or \
-      issubclass(type(e), urllib3_exceptions.ProtocolError) or \
-      issubclass(type(e), requests.exceptions.ConnectionError) or \
-      issubclass(type(e), requests.exceptions.ConnectTimeout)
+           issubclass(type(e), urllib3_exceptions.ConnectionError) or \
+           issubclass(type(e), urllib3_exceptions.ConnectTimeoutError) or \
+           issubclass(type(e), urllib3_exceptions.ProtocolError) or \
+           issubclass(type(e), requests.exceptions.ConnectionError) or \
+           issubclass(type(e), requests.exceptions.ConnectTimeout)
 
   def _calculate_backoff_delay(self, attempt, initial_delay_millis,
                                retry_multiplier, randomness_factor):
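`_is_retriable` and `_calculate_backoff_delay` are the two halves of the retry policy: one decides whether an exception is a transient network failure, the other decides how long to wait before the next attempt. A sketch of how such helpers typically combine in a retry loop; the function bodies and constants below are illustrative assumptions, not the library's implementation:

import random
import time


def calculate_backoff_delay(attempt, initial_delay_millis, retry_multiplier,
                            randomness_factor):
  # Exponential backoff with jitter: the base delay grows by retry_multiplier
  # per attempt, then is perturbed by up to +/- randomness_factor so that
  # many clients do not retry in lockstep.
  delay_millis = initial_delay_millis * (retry_multiplier**attempt)
  jitter = 1 + randomness_factor * (2 * random.random() - 1)
  return delay_millis * jitter / 1000.0  # seconds


def call_with_backoff(func, is_retriable, max_attempts=5):
  for attempt in range(max_attempts):
    try:
      return func()
    except Exception as e:
      # Give up on the last attempt or on non-transient errors.
      if attempt == max_attempts - 1 or not is_retriable(e):
        raise
      time.sleep(calculate_backoff_delay(attempt, 500, 2, 0.5))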
@@ -437,7 +437,7 @@ def authenticate(self):
 
     # Step 2: if credentials were not in env read in configuration file
     if self.CONFIG_NAME_USER not in config_data \
-            or self.CONFIG_NAME_KEY not in config_data:
+        or self.CONFIG_NAME_KEY not in config_data:
       if os.path.exists(self.config):
         config_data = self.read_config_file(config_data)
       elif self._is_help_or_version_command(api_command) or (len(
@@ -709,9 +709,9 @@ def print_config_values(self, prefix='- '):
 
   def build_kaggle_client(self):
     env = KaggleEnv.STAGING if '--staging' in self.args \
-      else KaggleEnv.ADMIN if '--admin' in self.args \
-      else KaggleEnv.LOCAL if '--local' in self.args \
-      else KaggleEnv.PROD
+          else KaggleEnv.ADMIN if '--admin' in self.args \
+          else KaggleEnv.LOCAL if '--local' in self.args \
+          else KaggleEnv.PROD
     verbose = '--verbose' in self.args or '-v' in self.args
     config = self.api_client.configuration
     return KaggleClient(
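The reindented `env` assignment is a chained conditional expression: flags are tested left to right, the first match wins, and `KaggleEnv.PROD` is the fallback. For readers unused to stacked ternaries, it behaves like this equivalent if/elif form; the `KaggleEnv` stand-in below is a placeholder enum for the sketch, not the library's actual definition:

from enum import Enum


class KaggleEnv(Enum):  # stand-in for the real enum
  PROD = 0
  STAGING = 1
  ADMIN = 2
  LOCAL = 3


def resolve_env(args):
  # Equivalent if/elif form of the chained conditional in the diff above.
  if '--staging' in args:
    return KaggleEnv.STAGING
  if '--admin' in args:
    return KaggleEnv.ADMIN
  if '--local' in args:
    return KaggleEnv.LOCAL
  return KaggleEnv.PROD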
@@ -935,9 +935,9 @@ def competition_submissions_cli(self,
         competition, page_token=page_token, page_size=page_size)
     if submissions:
       if csv_display:
-        self.print_csv(submissions, submission_fields)
+        self.print_csv(submissions, self.submission_fields)
       else:
-        self.print_table(submissions, submission_fields)
+        self.print_table(submissions, self.submission_fields)
     else:
       print('No submissions found')
 
@@ -1397,7 +1397,7 @@ def dataset_list_files(self, dataset, page_token=None, page_size=20):
     if dataset is None:
       raise ValueError('A dataset must be specified')
     owner_slug, dataset_slug, dataset_version_number = self.split_dataset_string(
-      dataset)
+        dataset)
 
     with self.build_kaggle_client() as kaggle:
       request = ApiListDatasetFilesRequest()
@@ -1501,15 +1501,15 @@ def dataset_download_file(self,
     if '/' in dataset:
       self.validate_dataset_string(dataset)
       owner_slug, dataset_slug, dataset_version_number = self.split_dataset_string(
-        dataset)
+          dataset)
     else:
       owner_slug = self.get_config_value(self.CONFIG_NAME_USER)
       dataset_slug = dataset
       dataset_version_number = None
 
     if path is None:
-      effective_path = self.get_default_download_dir(
-        'datasets', owner_slug, dataset_slug)
+      effective_path = self.get_default_download_dir('datasets', owner_slug,
+                                                     dataset_slug)
     else:
       effective_path = path
 
@@ -1554,10 +1554,10 @@ def dataset_download_files(self,
     if dataset is None:
       raise ValueError('A dataset must be specified')
     owner_slug, dataset_slug, dataset_version_number = self.split_dataset_string(
-      dataset)
+        dataset)
     if path is None:
-      effective_path = self.get_default_download_dir(
-        'datasets', owner_slug, dataset_slug)
+      effective_path = self.get_default_download_dir('datasets', owner_slug,
+                                                     dataset_slug)
     else:
       effective_path = path
 
@@ -1586,18 +1586,18 @@ def dataset_download_files(self,
           z.extractall(effective_path)
       except zipfile.BadZipFile as e:
         raise ValueError(
-          f"The file {outfile} is corrupted or not a valid zip file. "
-          "Please report this issue at https://www.github.com/kaggle/kaggle-api"
+            f"The file {outfile} is corrupted or not a valid zip file. "
+            "Please report this issue at https://www.github.com/kaggle/kaggle-api"
         )
       except FileNotFoundError:
         raise FileNotFoundError(
-          f"The file {outfile} was not found. "
-          "Please report this issue at https://www.github.com/kaggle/kaggle-api"
+            f"The file {outfile} was not found. "
+            "Please report this issue at https://www.github.com/kaggle/kaggle-api"
         )
       except Exception as e:
         raise RuntimeError(
-          f"An unexpected error occurred: {e}. "
-          "Please report this issue at https://www.github.com/kaggle/kaggle-api"
+            f"An unexpected error occurred: {e}. "
+            "Please report this issue at https://www.github.com/kaggle/kaggle-api"
         )
 
       try:
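These handlers re-raise extraction failures with a more actionable message, but a plain `raise` inside an `except` block replaces the original traceback context. A small self-contained variant (an illustration, not what the diff does) that preserves the cause via exception chaining:

import zipfile


def extract_all(outfile, effective_path):
  try:
    with zipfile.ZipFile(outfile) as z:
      z.extractall(effective_path)
  except zipfile.BadZipFile as e:
    # 'from e' keeps the original BadZipFile as __cause__ in the traceback.
    raise ValueError(
        f"The file {outfile} is corrupted or not a valid zip file. "
        "Please report this issue at https://www.github.com/kaggle/kaggle-api"
    ) from e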
@@ -1754,8 +1754,10 @@ def dataset_create_version(self,
     id_no = self.get_or_default(meta_data, 'id_no', None)
     if not ref and not id_no:
       raise ValueError('ID or slug must be specified in the metadata')
-    elif ref and ref == self.config_values[self.CONFIG_NAME_USER] + '/INSERT_SLUG_HERE':
-      raise ValueError('Default slug detected, please change values before uploading')
+    elif ref and ref == self.config_values[
+        self.CONFIG_NAME_USER] + '/INSERT_SLUG_HERE':
+      raise ValueError(
+          'Default slug detected, please change values before uploading')
 
     subtitle = meta_data.get('subtitle')
     if subtitle and (len(subtitle) < 20 or len(subtitle) > 80):
@@ -1793,7 +1795,9 @@ def dataset_create_version(self,
     with ResumableUploadContext() as upload_context:
       self.upload_files(body, resources, folder, ApiBlobType.DATASET,
                         upload_context, quiet, dir_mode)
-      request.body.files = [self._api_dataset_new_file(file) for file in request.body.files]
+      request.body.files = [
+          self._api_dataset_new_file(file) for file in request.body.files
+      ]
       response = self.with_retry(message)(request)
       return response
 
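`self.with_retry(message)(request)` wraps the request callable so transient failures are retried before giving up. A method-shaped sketch of that wrapper, assuming the `_is_retriable` and `_calculate_backoff_delay` helpers from the earlier hunk; the attempt count, delay parameters, and time units are illustrative assumptions, not the library's actual values:

import time


def with_retry(self, func, max_retries=5):
  def retriable_func(*args, **kwargs):
    for attempt in range(max_retries):
      try:
        return func(*args, **kwargs)
      except Exception as e:
        if attempt == max_retries - 1 or not self._is_retriable(e):
          raise
        # Assumed units: treat the computed backoff as seconds.
        time.sleep(self._calculate_backoff_delay(attempt, 500, 2, 0.5))
  return retriable_func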
@@ -1808,8 +1812,10 @@ def dataset_create_version(self,
               self.datasets_create_version_by_id_with_http_info)(
                   id_no, request)))
     else:
-      if ref == self.config_values[self.CONFIG_NAME_USER] + '/INSERT_SLUG_HERE':
-        raise ValueError('Default slug detected, please change values before uploading')
+      if ref == self.config_values[
+          self.CONFIG_NAME_USER] + '/INSERT_SLUG_HERE':
+        raise ValueError(
+            'Default slug detected, please change values before uploading')
       self.validate_dataset_string(ref)
       ref_list = ref.split('/')
       owner_slug = ref_list[0]
@@ -1924,22 +1930,18 @@ def dataset_create_new(self,
     dataset_slug = ref_list[1]
 
     # validations
-    if ref == self.config_values[
-        self.CONFIG_NAME_USER] + '/INSERT_SLUG_HERE':
+    if ref == self.config_values[self.CONFIG_NAME_USER] + '/INSERT_SLUG_HERE':
       raise ValueError(
-        'Default slug detected, please change values before uploading')
+          'Default slug detected, please change values before uploading')
     if title == 'INSERT_TITLE_HERE':
       raise ValueError(
-        'Default title detected, please change values before uploading'
-      )
+          'Default title detected, please change values before uploading')
     if len(licenses) != 1:
       raise ValueError('Please specify exactly one license')
     if len(dataset_slug) < 6 or len(dataset_slug) > 50:
-      raise ValueError(
-        'The dataset slug must be between 6 and 50 characters')
+      raise ValueError('The dataset slug must be between 6 and 50 characters')
     if len(title) < 6 or len(title) > 50:
-      raise ValueError(
-        'The dataset title must be between 6 and 50 characters')
+      raise ValueError('The dataset title must be between 6 and 50 characters')
     resources = meta_data.get('resources')
     if resources:
       self.validate_resources(folder, resources)
@@ -1950,19 +1952,19 @@ def dataset_create_new(self,
 
     subtitle = meta_data.get('subtitle')
     if subtitle and (len(subtitle) < 20 or len(subtitle) > 80):
-      raise ValueError(
-        'Subtitle length must be between 20 and 80 characters')
-
-    request = DatasetNewRequest(title=title,
-                                slug=dataset_slug,
-                                owner_slug=owner_slug,
-                                license_name=license_name,
-                                subtitle=subtitle,
-                                description=description,
-                                files=[],
-                                is_private=not public,
-                                convert_to_csv=convert_to_csv,
-                                category_ids=keywords)
+      raise ValueError('Subtitle length must be between 20 and 80 characters')
+
+    request = DatasetNewRequest(
+        title=title,
+        slug=dataset_slug,
+        owner_slug=owner_slug,
+        license_name=license_name,
+        subtitle=subtitle,
+        description=description,
+        files=[],
+        is_private=not public,
+        convert_to_csv=convert_to_csv,
+        category_ids=keywords)
 
     with ResumableUploadContext() as upload_context:
       # TODO Change upload_files() to use ApiCreateDatasetRequest
@@ -1971,23 +1973,24 @@ def dataset_create_new(self,
 
     with self.build_kaggle_client() as kaggle:
       retry_request = ApiCreateDatasetRequest()
-        retry_request.title = title
-        retry_request.slug = dataset_slug
-        retry_request.owner_slug = owner_slug
-        retry_request.license_name = license_name
-        retry_request.subtitle = subtitle
-        retry_request.description = description
-        retry_request.files = []
-        retry_request.is_private = not public
-        retry_request.category_ids = keywords
+      retry_request.title = title
+      retry_request.slug = dataset_slug
+      retry_request.owner_slug = owner_slug
+      retry_request.license_name = license_name
+      retry_request.subtitle = subtitle
+      retry_request.description = description
+      retry_request.files = []
+      retry_request.is_private = not public
+      retry_request.category_ids = keywords
       response = self.with_retry(
-        kaggle.datasets.dataset_api_client.create_dataset)(retry_request)
+          kaggle.datasets.dataset_api_client.create_dataset)(
+              retry_request)
       return response
 
     result = DatasetNewResponse(
-      self.process_response(
-        self.with_retry(
-          self.datasets_create_new_with_http_info)(request)))
+        self.process_response(
+            self.with_retry(
+                self.datasets_create_new_with_http_info)(request)))
 
     return result
 