Skip to content

Commit a687ce2

Browse files
committed
Add download export progress
1 parent 65d5dae commit a687ce2

File tree

3 files changed

+34
-13
lines changed

3 files changed

+34
-13
lines changed

src/superannotate/lib/app/interface/sdk_interface.py

Lines changed: 11 additions & 1 deletion
Original file line number | Diff line number | Diff line change
@@ -2153,13 +2153,23 @@ def download_export(
21532153
"""
21542154
project_name, folder_name = extract_project_folder(project)
21552155
export_name = export["name"] if isinstance(export, dict) else export
2156-
controller.download_export(
2156+
2157+
use_case = controller.download_export(
21572158
project_name=project_name,
21582159
export_name=export_name,
21592160
folder_path=folder_path,
21602161
extract_zip_contents=extract_zip_contents,
21612162
to_s3_bucket=to_s3_bucket,
21622163
)
2164+
if use_case.is_valid():
2165+
if to_s3_bucket:
2166+
with tqdm(
2167+
total=use_case.get_upload_files_count(), desc="Uploading"
2168+
) as progress_bar:
2169+
for _ in use_case.execute():
2170+
progress_bar.update(1)
2171+
else:
2172+
use_case.execute()
21632173

21642174

21652175
@Trackable

src/superannotate/lib/core/usecases.py

Lines changed: 22 additions & 10 deletions
Original file line number | Diff line number | Diff line change
@@ -5,6 +5,7 @@
55
import logging
66
import os.path
77
import random
8+
import tempfile
89
import time
910
import uuid
1011
import zipfile
@@ -21,7 +22,6 @@
2122
import numpy as np
2223
import pandas as pd
2324
import requests
24-
import tempfile
2525
from boto3.exceptions import Boto3Error
2626
from lib.app.analytics.common import aggregate_annotations_as_df
2727
from lib.app.analytics.common import consensus_plot
@@ -382,7 +382,9 @@ def execute(self):
382382
if self._include_workflow:
383383
new_workflows = self._workflows_repo(self._backend_service, project)
384384
for workflow in self.workflows.get_all():
385-
existing_workflow_ids = list(map(lambda i: i.uuid, new_workflows.get_all()))
385+
existing_workflow_ids = list(
386+
map(lambda i: i.uuid, new_workflows.get_all())
387+
)
386388
workflow_data = copy.copy(workflow)
387389
workflow_data.project_id = project.uuid
388390
workflow_data.class_id = annotation_classes_mapping[
@@ -391,7 +393,9 @@ def execute(self):
391393
new_workflows.insert(workflow_data)
392394
workflows = new_workflows.get_all()
393395
new_workflow = [
394-
work_flow for work_flow in workflows if work_flow.uuid not in existing_workflow_ids
396+
work_flow
397+
for work_flow in workflows
398+
if work_flow.uuid not in existing_workflow_ids
395399
][0]
396400
workflow_attributes = []
397401
for attribute in workflow_data.attribute:
@@ -3960,6 +3964,7 @@ def __init__(
39603964
self._folder_path = folder_path
39613965
self._extract_zip_contents = extract_zip_contents
39623966
self._to_s3_bucket = to_s3_bucket
3967+
self._temp_dir = None
39633968

39643969
def validate_project_type(self):
39653970
if self._project.project_type in constances.LIMITED_FUNCTIONS:
@@ -3979,10 +3984,9 @@ def _upload_file_to_s3(_to_s3_bucket, _path, _s3_key) -> None:
39793984
for path in files_to_upload:
39803985
s3_key = f"{self._folder_path}/{path.name}"
39813986
results.append(
3982-
executor.submit(
3983-
_upload_file_to_s3, to_s3_bucket, str(path), s3_key
3984-
)
3987+
executor.submit(_upload_file_to_s3, to_s3_bucket, str(path), s3_key)
39853988
)
3989+
yield
39863990

39873991
def download_to_local_storage(self, destination: str):
39883992
exports = self._service.get_exports(
@@ -4024,15 +4028,23 @@ def download_to_local_storage(self, destination: str):
40244028
Path.unlink(filepath)
40254029
return export["id"], filepath, destination
40264030

4031+
def get_upload_files_count(self):
4032+
if not self._temp_dir:
4033+
self._temp_dir = tempfile.TemporaryDirectory()
4034+
self.download_to_local_storage(self._temp_dir.name)
4035+
return len(list(Path(self._temp_dir.name).rglob("*.*")))
4036+
40274037
def execute(self):
40284038
if self.is_valid():
40294039
if self._to_s3_bucket:
4030-
with tempfile.TemporaryDirectory() as tmp:
4031-
self.download_to_local_storage(tmp)
4032-
self.upload_to_s3_from_folder(tmp)
4040+
self.get_upload_files_count()
4041+
yield from self.upload_to_s3_from_folder(self._temp_dir.name)
40334042
logger.info(f"Exported to AWS {self._to_s3_bucket}/{self._folder_path}")
4043+
self._temp_dir.cleanup()
40344044
else:
4035-
export_id, filepath, destination = self.download_to_local_storage(self._folder_path)
4045+
export_id, filepath, destination = self.download_to_local_storage(
4046+
self._folder_path
4047+
)
40364048
if self._extract_zip_contents:
40374049
logger.info(f"Extracted {filepath} to folder {destination}")
40384050
else:

src/superannotate/lib/infrastructure/controller.py

Lines changed: 1 addition & 2 deletions
Original file line number | Diff line number | Diff line change
@@ -1382,15 +1382,14 @@ def download_export(
13821382
to_s3_bucket: bool,
13831383
):
13841384
project = self._get_project(project_name)
1385-
use_case = usecases.DownloadExportUseCase(
1385+
return usecases.DownloadExportUseCase(
13861386
service=self._backend_client,
13871387
project=project,
13881388
export_name=export_name,
13891389
folder_path=folder_path,
13901390
extract_zip_contents=extract_zip_contents,
13911391
to_s3_bucket=to_s3_bucket,
13921392
)
1393-
return use_case.execute()
13941393

13951394
def download_ml_model(self, model_data: dict, download_path: str):
13961395
model = MLModelEntity(

0 commit comments

Comments (0)