@@ -24,7 +24,9 @@
 from os.path import expanduser
 from os.path import isfile
 import sys
+import shutil
 import zipfile
+import tempfile
 from ..api_client import ApiClient
 from kaggle.configuration import Configuration
 from .kaggle_api import KaggleApi
@@ -2229,40 +2231,66 @@ def upload_files(self, request, resources, folder, quiet=False):
             if not quiet:
                 print('Starting upload for file ' + file_name)
             if os.path.isfile(full_path):
-                content_length = os.path.getsize(full_path)
-                token = self.dataset_upload_file(full_path, quiet)
-                if token is None:
-                    if not quiet:
-                        print('Upload unsuccessful: ' + file_name)
+                retval = self._upload_file(file_name, full_path, quiet, request, resources)
+                if retval:
+                    return
+            elif os.path.isdir(full_path):
+                temp_dir = tempfile.mkdtemp()
+                try:
+                    _, dir_name = os.path.split(full_path)
+                    archive_path = shutil.make_archive(
+                        os.path.join(temp_dir, dir_name), 'zip', full_path)
+                    _, archive_name = os.path.split(archive_path)
+                    retval = self._upload_file(archive_name, archive_path,
+                                               quiet, request, resources)
+                finally:
+                    shutil.rmtree(temp_dir)
+                if retval:
                     return
-
-                if not quiet:
-                    print('Upload successful: ' + file_name + ' (' +
-                          File.get_size(content_length) + ')')
-
-                upload_file = DatasetUploadFile()
-                upload_file.token = token
-                if resources:
-                    for item in resources:
-                        if file_name == item.get('path'):
-                            upload_file.description = item.get('description')
-                            if 'schema' in item:
-                                fields = self.get_or_default(
-                                    item['schema'], 'fields', [])
-                                processed = []
-                                count = 0
-                                for field in fields:
-                                    processed.append(
-                                        self.process_column(field))
-                                    processed[count].order = count
-                                    count += 1
-                                upload_file.columns = processed
-
-                request.files.append(upload_file)
             else:
                 if not quiet:
                     print('Skipping: ' + file_name)

+    def _upload_file(self, file_name, full_path, quiet, request, resources):
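+        """ helper function to upload a single file
+            Parameters
+            ==========
+            file_name: name of the file to upload
+            full_path: path to the file to upload
+            quiet: suppress verbose output
+            request: the prepared request to append the upload to
+            resources: optional file metadata
+            :return: True if the upload failed, else False
+        """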
+        content_length = os.path.getsize(full_path)
+        token = self.dataset_upload_file(full_path, quiet)
+        if token is None:
+            if not quiet:
+                print('Upload unsuccessful: ' + file_name)
+            return True
+        if not quiet:
+            print('Upload successful: ' + file_name + ' (' +
+                  File.get_size(content_length) + ')')
+        upload_file = DatasetUploadFile()
+        upload_file.token = token
+        if resources:
+            for item in resources:
+                if file_name == item.get('path'):
+                    upload_file.description = item.get('description')
+                    if 'schema' in item:
+                        fields = self.get_or_default(
+                            item['schema'], 'fields', [])
+                        processed = []
+                        count = 0
+                        for field in fields:
+                            processed.append(
+                                self.process_column(field))
+                            processed[count].order = count
+                            count += 1
+                        upload_file.columns = processed
+        request.files.append(upload_file)
+        return False
+
     def process_column(self, column):
         """ process a column, check for the type, and return the processed
             column
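For reference, the directory branch added above follows a standard-library pattern: archive into a throwaway tempfile.mkdtemp() root, hand the resulting zip off, then rmtree the whole root. A minimal standalone sketch of that pattern, assuming a hypothetical local directory my_dataset_dir that is not part of this change:

import os
import shutil
import tempfile

# Hypothetical input directory, created here only so the sketch runs.
src_dir = 'my_dataset_dir'
os.makedirs(src_dir, exist_ok=True)

temp_dir = tempfile.mkdtemp()
try:
    # make_archive appends '.zip' itself, so pass a base name without an
    # extension; the third argument selects the directory to archive.
    _, dir_name = os.path.split(src_dir)
    archive_path = shutil.make_archive(
        os.path.join(temp_dir, dir_name), 'zip', src_dir)
    print(archive_path)  # <temp_dir>/my_dataset_dir.zip
    # ...consume the archive here (the PR uploads it via _upload_file)...
finally:
    # One rmtree removes both the temp root and the archive inside it.
    shutil.rmtree(temp_dir)

Writing the archive into its own temporary root, rather than next to the source directory, avoids name collisions in the user's folder and keeps cleanup to a single call even if the upload raises.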