Skip to content

Commit 18d32c4

Browse files
authored
Merge pull request #438 from superannotateai/friday
Friday
2 parents fb3cb4d + da4be87 commit 18d32c4

File tree

15 files changed: +619 additions, -331 deletions

pytest.ini

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -2,4 +2,4 @@
22
minversion = 3.7
33
log_cli=true
44
python_files = test_*.py
5-
;addopts = -n auto --dist=loadscope
5+
addopts = -n auto --dist=loadscope

requirements_dev.txt

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,2 +1,2 @@
1-
superannotate_schemas>=v1.0.43dev3
1+
superannotate_schemas>=v1.0.43dev5
22

src/superannotate/__init__.py

Lines changed: 1 addition & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -211,8 +211,7 @@
211211

212212
__author__ = "Superannotate"
213213

214-
WORKING_DIR = os.path.split(os.path.realpath(__file__))[0]
215-
sys.path.append(WORKING_DIR)
214+
sys.path.append(os.path.split(os.path.realpath(__file__))[0])
216215
logging.getLogger("botocore").setLevel(logging.CRITICAL)
217216
logger = get_default_logger()
218217

src/superannotate/lib/app/analytics/aggregators.py

Lines changed: 20 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -85,6 +85,22 @@ class DocumentRawData:
8585

8686

8787
class DataAggregator:
88+
MAPPERS = {
89+
"event": lambda annotation: None,
90+
"bbox": lambda annotation: annotation["points"],
91+
"polygon": lambda annotation: annotation["points"],
92+
"polyline": lambda annotation: annotation["points"],
93+
"cuboid": lambda annotation: annotation["points"],
94+
"point": lambda annotation: {"x": annotation["x"], "y": annotation["y"]},
95+
"annotation_type": lambda annotation: dict(
96+
cx=annotation["cx"],
97+
cy=annotation["cy"],
98+
rx=annotation["rx"],
99+
ry=annotation["ry"],
100+
angle=annotation["angle"],
101+
),
102+
}
103+
88104
def __init__(
89105
self,
90106
project_type: str,
@@ -186,11 +202,12 @@ def aggregate_video_annotations_as_df(self, annotation_paths: List[str]):
186202
# append instances
187203
instances = annotation_data.get("instances", [])
188204
for idx, instance in enumerate(instances):
205+
instance_type = instance["meta"].get("type", "event")
189206
instance_raw = copy.copy(raw_data)
190207
instance_raw.instanceId = int(idx)
191208
instance_raw.instanceStart = instance["meta"].get("start")
192209
instance_raw.instanceEnd = instance["meta"].get("end")
193-
instance_raw.type = instance["meta"].get("type")
210+
instance_raw.type = instance_type
194211
instance_raw.className = instance["meta"].get("className")
195212
instance_raw.createdAt = instance["meta"].get("createdAt")
196213
instance_raw.createdBy = (
@@ -217,7 +234,7 @@ def aggregate_video_annotations_as_df(self, annotation_paths: List[str]):
217234
for timestamp_id, timestamp in enumerate(timestamps):
218235
timestamp_raw = copy.copy(parameter_raw)
219236
timestamp_raw.timestampId = timestamp_id
220-
timestamp_raw.meta = timestamp.get("points")
237+
timestamp_raw.meta = self.MAPPERS[instance_type](timestamp)
221238
attributes = timestamp.get("attributes", [])
222239
for attribute_id, attribute in enumerate(attributes):
223240
attribute_raw = copy.copy(timestamp_raw)
@@ -467,8 +484,7 @@ def __append_annotation(annotation_dict):
467484
]
468485
):
469486
logger.warning(
470-
"Annotation class group value %s not in classes json. Skipping.",
471-
attribute_name,
487+
f"Annotation class group value {attribute_name} not in classes json. Skipping."
472488
)
473489
continue
474490
annotation_dict = {

src/superannotate/lib/app/interface/sdk_interface.py

Lines changed: 3 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -2144,6 +2144,8 @@ def upload_image_to_project(
21442144
raise AppException(response.errors)
21452145

21462146

2147+
@Trackable
2148+
@validate_arguments
21472149
def search_models(
21482150
name: Optional[NotEmptyStr] = None,
21492151
type_: Optional[NotEmptyStr] = None,
@@ -2303,7 +2305,7 @@ def delete_annotations(
23032305
23042306
:param project: project name or folder path (e.g., "project1/folder1")
23052307
:type project: str
2306-
:param item_names: image names. If None, all image annotations from a given project/folder will be deleted.
2308+
:param item_names: item names. If None, all item annotations from a given project/folder will be deleted.
23072309
:type item_names: list of strs
23082310
"""
23092311

src/superannotate/lib/core/data_handlers.py

Lines changed: 17 additions & 11 deletions
Original file line numberDiff line numberDiff line change
@@ -47,7 +47,7 @@ def get_annotation_class(self, name: str) -> AnnotationClass:
4747

4848
@lru_cache()
4949
def get_attribute_group(
50-
self, annotation_class: AnnotationClass, attr_group_name: str
50+
self, annotation_class: AnnotationClass, attr_group_name: str
5151
) -> AttributeGroup:
5252
for attr_group in annotation_class.attribute_groups:
5353
if attr_group.name == attr_group_name:
@@ -114,10 +114,10 @@ def handle(self, annotation: dict):
114114

115115
class MissingIDsHandler(BaseAnnotationDateHandler):
116116
def __init__(
117-
self,
118-
annotation_classes: List[AnnotationClass],
119-
templates: List[dict],
120-
reporter: Reporter,
117+
self,
118+
annotation_classes: List[AnnotationClass],
119+
templates: List[dict],
120+
reporter: Reporter,
121121
):
122122
super().__init__(annotation_classes)
123123
self.validate_existing_classes(annotation_classes)
@@ -187,7 +187,7 @@ def handle(self, annotation: dict):
187187
template["name"]: template["id"] for template in self._templates
188188
}
189189
for annotation_instance in (
190-
i for i in annotation["instances"] if i.get("type", None) == "template"
190+
i for i in annotation["instances"] if i.get("type", None) == "template"
191191
):
192192
annotation_instance["templateId"] = template_name_id_map.get(
193193
annotation_instance.get("templateName", ""), -1
@@ -244,10 +244,14 @@ def _point_handler(time_stamp):
244244
HANDLERS: Dict[str, Callable] = {
245245
AnnotationTypes.EVENT: lambda timestamp: {},
246246
AnnotationTypes.BBOX: lambda timestamp: {"points": timestamp["points"]},
247-
AnnotationTypes.POINT: lambda timestamp: {"x": timestamp["x"], "y": timestamp["y"]},
247+
AnnotationTypes.POINT: lambda timestamp: {
248+
"x": timestamp["x"],
249+
"y": timestamp["y"],
250+
},
248251
AnnotationTypes.POLYLINE: lambda timestamp: {"points": timestamp["points"]},
249252
AnnotationTypes.POLYGON: lambda timestamp: {
250-
"points": timestamp["points"], "exclude": timestamp.get("exclude", [])
253+
"points": timestamp["points"],
254+
"exclude": timestamp.get("exclude", []),
251255
},
252256
}
253257

@@ -315,7 +319,9 @@ def convert_timestamp(timestamp):
315319
editor_instance["timeline"][timestamp]["active"] = False
316320
handler: Callable = self.HANDLERS.get(meta["type"])
317321
if handler:
318-
editor_instance["timeline"][timestamp].update(handler(timestamp_data))
322+
editor_instance["timeline"][timestamp].update(
323+
handler(timestamp_data)
324+
)
319325
# if timestamp_data.get("points", None):
320326
# editor_instance["timeline"][timestamp][
321327
# "points"
@@ -357,10 +363,10 @@ def convert_timestamp(timestamp):
357363
(group_name, attr_name)
358364
)
359365
attributes_to_add = (
360-
existing_attributes_in_current_instance - active_attributes
366+
existing_attributes_in_current_instance - active_attributes
361367
)
362368
attributes_to_delete = (
363-
active_attributes - existing_attributes_in_current_instance
369+
active_attributes - existing_attributes_in_current_instance
364370
)
365371
if attributes_to_add or attributes_to_delete:
366372
editor_instance["timeline"][timestamp][

src/superannotate/lib/core/usecases/images.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1620,7 +1620,7 @@ def execute(self) -> Response:
16201620
polling_states[response.get("poll_id")] = False
16211621

16221622
if not polling_states:
1623-
self._response.errors = AppException("Invalid image names or empty folder.")
1623+
self._response.errors = AppException("Invalid item names or empty folder.")
16241624
else:
16251625
for poll_id in polling_states:
16261626
timeout_start = time.time()

src/superannotate/lib/core/usecases/models.py

Lines changed: 24 additions & 20 deletions
Original file line numberDiff line numberDiff line change
@@ -8,6 +8,7 @@
88

99
import boto3
1010
import lib.core as constances
11+
from lib.core.enums import ProjectType
1112
import pandas as pd
1213
import requests
1314
from botocore.exceptions import ClientError
@@ -68,7 +69,8 @@ def validate_fuse(self):
6869
and self._include_fuse
6970
):
7071
raise AppValidationException(
71-
f"Include fuse functionality is not supported for projects containing {self._project.type} attached with URLs"
72+
"Include fuse functionality is not supported for projects containing "
73+
f"{ProjectType.get_name(self._project.type)} attached with URLs"
7274
)
7375

7476
def validate_folder_names(self):
@@ -202,17 +204,16 @@ def upload_to_s3_from_folder(self, source: str, folder_path: str):
202204

203205
def _upload_file_to_s3(_to_s3_bucket, _path, _s3_key) -> None:
204206
_to_s3_bucket.upload_file(str(_path), _s3_key)
205-
self.reporter.update_progress()
206207

207208
with concurrent.futures.ThreadPoolExecutor(max_workers=10) as executor:
208209
results = []
209-
self.reporter.start_progress(len(files_to_upload), "Uploading")
210+
self.reporter.start_spinner()
210211
for path in files_to_upload:
211212
s3_key = f"{folder_path + '/' if folder_path else ''}{str(Path(path).relative_to(Path(source)))}"
212213
results.append(
213214
executor.submit(_upload_file_to_s3, to_s3_bucket, path, s3_key)
214215
)
215-
self.reporter.finish_progress()
216+
self.reporter.stop_spinner()
216217

217218
def download_to_local_storage(self, destination: str, extract_zip=False):
218219
exports = self._service.get_exports(
@@ -227,22 +228,25 @@ def download_to_local_storage(self, destination: str, extract_zip=False):
227228
if not export:
228229
raise AppException("Export not found.")
229230
export_status = export["status"]
230-
231-
while export_status != ExportStatus.COMPLETE.value:
232-
logger.info("Waiting 5 seconds for export to finish on server.")
233-
time.sleep(5)
234-
235-
export = self._service.get_export(
236-
team_id=self._project.team_id,
237-
project_id=self._project.id,
238-
export_id=export["id"],
239-
)
240-
if "error" in export:
241-
raise AppException(export["error"])
242-
export_status = export["status"]
243-
if export_status in (ExportStatus.ERROR.value, ExportStatus.CANCELED.value):
244-
raise AppException("Couldn't download export.")
245-
231+
if export_status != ExportStatus.COMPLETE.value:
232+
logger.info("Waiting for export to finish on server.")
233+
self.reporter.start_spinner()
234+
while export_status != ExportStatus.COMPLETE.value:
235+
export = self._service.get_export(
236+
team_id=self._project.team_id,
237+
project_id=self._project.id,
238+
export_id=export["id"],
239+
)
240+
if "error" in export:
241+
raise AppException(export["error"])
242+
export_status = export["status"]
243+
if export_status in (
244+
ExportStatus.ERROR.value,
245+
ExportStatus.CANCELED.value,
246+
):
247+
self.reporter.stop_spinner()
248+
raise AppException("Couldn't download export.")
249+
self.reporter.stop_spinner()
246250
filename = Path(export["path"]).name
247251
filepath = Path(destination) / filename
248252
with requests.get(export["download"], stream=True) as response:

0 commit comments

Comments (0)