|
6 | 6 | import zipfile |
7 | 7 | from datetime import datetime |
8 | 8 | from pathlib import Path |
| 9 | +import shutil |
9 | 10 |
|
10 | 11 | import boto3 |
11 | 12 | import requests |
12 | 13 | from tqdm import tqdm |
13 | 14 |
|
14 | 15 | from ..api import API |
15 | | -from ..common import annotation_status_str_to_int |
| 16 | +from ..common import annotation_status_str_to_int, upload_state_int_to_str |
16 | 17 | from ..exceptions import ( |
17 | 18 | SABaseException, SAExistingExportNameException, |
18 | 19 | SANonExistingExportNameException |
@@ -123,6 +124,12 @@ def prepare_export( |
123 | 124 | """ |
124 | 125 | if not isinstance(project, dict): |
125 | 126 | project = get_project_metadata_bare(project) |
| 127 | + upload_state = upload_state_int_to_str(project.get("upload_state")) |
| 128 | + if upload_state == "External" and include_fuse == True: |
| 129 | + logger.info( |
| 130 | + "Include fuse functionality is not supported for projects containing images attached with URLs" |
| 131 | + ) |
| 132 | + include_fuse = False |
126 | 133 | team_id, project_id = project["team_id"], project["id"] |
127 | 134 | if annotation_statuses is None: |
128 | 135 | annotation_statuses = [2, 3, 4, 5] |
@@ -203,6 +210,15 @@ def __upload_files_to_aws_thread( |
203 | 210 | already_uploaded[i] = True |
204 | 211 |
|
205 | 212 |
|
def _download_file(url, local_filename):
    """Stream the file at *url* to *local_filename* in fixed-size chunks.

    Streaming (``stream=True`` + ``iter_content``) avoids loading the whole
    export archive into memory at once, unlike the previous
    ``requests.get(...).content`` approach.

    :param url: HTTP(S) URL to download from
    :param local_filename: destination path (str or Path) to write to
    :return: *local_filename*, for caller convenience
    :raises requests.HTTPError: if the server responds with an error status
    """
    with requests.get(url, stream=True) as response:
        # Fail fast on 4xx/5xx instead of writing an HTML error page to disk.
        response.raise_for_status()
        with open(local_filename, 'wb') as out_file:
            for chunk in response.iter_content(chunk_size=8192):
                # Chunked transfer encoding can yield empty keep-alive
                # chunks; skip them rather than issuing no-op writes.
                if chunk:
                    out_file.write(chunk)
    return local_filename
| 220 | + |
| 221 | + |
206 | 222 | def download_export( |
207 | 223 | project, export, folder_path, extract_zip_contents=True, to_s3_bucket=None |
208 | 224 | ): |
@@ -237,25 +253,26 @@ def download_export( |
237 | 253 | break |
238 | 254 |
|
239 | 255 | filename = Path(res['path']).name |
240 | | - r = requests.get(res['download'], allow_redirects=True) |
241 | | - if to_s3_bucket is None: |
242 | | - filepath = Path(folder_path) / filename |
243 | | - open(filepath, 'wb').write(r.content) |
244 | | - if extract_zip_contents: |
245 | | - with zipfile.ZipFile(filepath, 'r') as f: |
246 | | - f.extractall(folder_path) |
247 | | - Path.unlink(filepath) |
248 | | - logger.info("Extracted %s to folder %s", filepath, folder_path) |
249 | | - else: |
250 | | - logger.info("Downloaded export ID %s to %s", res['id'], filepath) |
251 | | - else: |
252 | | - with tempfile.TemporaryDirectory() as tmpdirname: |
253 | | - filepath = Path(tmpdirname) / filename |
254 | | - open(filepath, 'wb').write(r.content) |
| 256 | + with tempfile.TemporaryDirectory() as tmpdirname: |
| 257 | + temp_filepath = Path(tmpdirname) / filename |
| 258 | + _download_file(res['download'], temp_filepath) |
| 259 | + if to_s3_bucket is None: |
| 260 | + filepath = Path(folder_path) / filename |
| 261 | + shutil.copyfile(temp_filepath, filepath) |
255 | 262 | if extract_zip_contents: |
256 | 263 | with zipfile.ZipFile(filepath, 'r') as f: |
257 | | - f.extractall(tmpdirname) |
| 264 | + f.extractall(folder_path) |
258 | 265 | Path.unlink(filepath) |
| 266 | + logger.info("Extracted %s to folder %s", filepath, folder_path) |
| 267 | + else: |
| 268 | + logger.info( |
| 269 | + "Downloaded export ID %s to %s", res['id'], filepath |
| 270 | + ) |
| 271 | + else: |
| 272 | + if extract_zip_contents: |
| 273 | + with zipfile.ZipFile(temp_filepath, 'r') as f: |
| 274 | + f.extractall(tmpdirname) |
| 275 | + Path.unlink(temp_filepath) |
259 | 276 | files_to_upload = [] |
260 | 277 | for file in Path(tmpdirname).rglob("*.*"): |
261 | 278 | if not file.is_file(): |
@@ -290,4 +307,4 @@ def download_export( |
290 | 307 | t.join() |
291 | 308 | finish_event.set() |
292 | 309 | tqdm_thread.join() |
293 | | - logger.info("Exported to AWS %s/%s", to_s3_bucket, folder_path) |
| 310 | + logger.info("Exported to AWS %s/%s", to_s3_bucket, folder_path) |
0 commit comments