
Commit b975864

Merge pull request #617 from superannotateai/1950_download_annotations
added download_annotations path expanduser logic, changed convert_pro…
2 parents 9c7dbc3 + 6d72892 commit b975864

7 files changed: +73, -239 lines


docs/source/api_reference/helpers.rst

Lines changed: 0 additions & 1 deletion
@@ -13,7 +13,6 @@ _________________________________________________________________
 .. autofunction:: superannotate.import_annotation
 .. autofunction:: superannotate.export_annotation
 .. autofunction:: superannotate.convert_project_type
-.. autofunction:: superannotate.convert_json_version

 ----------

src/superannotate/__init__.py

Lines changed: 0 additions & 3 deletions
@@ -9,10 +9,8 @@
 import logging.config  # noqa
 import requests  # noqa
 from packaging.version import parse  # noqa
-from superannotate.lib.app.input_converters import convert_json_version  # noqa
 from superannotate.lib.app.input_converters import convert_project_type  # noqa
 from superannotate.lib.app.exceptions import AppException  # noqa
-from superannotate.lib.app.input_converters import convert_json_version  # noqa
 from superannotate.lib.app.input_converters import convert_project_type  # noqa
 from superannotate.lib.app.input_converters import export_annotation  # noqa
 from superannotate.lib.app.input_converters import import_annotation  # noqa
@@ -32,7 +30,6 @@
     "enums",
     "AppException",
     # converters
-    "convert_json_version",
     "import_annotation",
     "export_annotation",
     "convert_project_type",

src/superannotate/lib/app/input_converters/__init__.py

Lines changed: 0 additions & 2 deletions
@@ -1,11 +1,9 @@
-from .conversion import convert_json_version
 from .conversion import convert_project_type
 from .conversion import export_annotation
 from .conversion import import_annotation


 __all__ = [
-    "convert_json_version",
     "convert_project_type",
     "export_annotation",
     "import_annotation",

src/superannotate/lib/app/input_converters/conversion.py

Lines changed: 11 additions & 48 deletions
@@ -6,18 +6,18 @@
 import tempfile
 from argparse import Namespace
 from pathlib import Path
+from typing import Union

 from lib.app.exceptions import AppException
 from lib.app.interface.base_interface import Tracker
-from lib.core import DEPRICATED_DOCUMENT_VIDEO_MESSAGE
 from lib.core import LIMITED_FUNCTIONS
 from lib.core.enums import ProjectType
+from typing_extensions import Literal

 from .export_from_sa_conversions import export_from_sa
 from .import_to_sa_conversions import import_to_sa
-from .sa_conversion import degrade_json
 from .sa_conversion import sa_convert_project_type
-from .sa_conversion import upgrade_json
+

 ALLOWED_TASK_TYPES = [
     "panoptic_segmentation",
@@ -430,63 +430,26 @@ def import_annotation(


 @Tracker
-def convert_project_type(input_dir, output_dir):
+def convert_project_type(
+    input_dir: Union[str, Path],
+    output_dir: Union[str, Path],
+    convert_to: Literal["Vector", "Pixel"],
+):
     """Converts SuperAnnotate 'Vector' project type to 'Pixel' or reverse.

     :param input_dir: Path to the dataset folder that you want to convert.
     :type input_dir: Pathlike(str or Path)
     :param output_dir: Path to the folder where you want to have converted files.
     :type output_dir: Pathlike(str or Path)
-
+    :param convert_to: the project type to which the current project should be converted.
+    :type convert_to: str
     """
     params_info = [
         (input_dir, "input_dir", (str, Path)),
         (output_dir, "output_dir", (str, Path)),
     ]
     _passes_type_sanity(params_info)
-    json_paths = list(Path(str(input_dir)).glob("*.json"))
-    if (
-        json_paths
-        and "___pixel.json" not in json_paths[0].name
-        and "___objects.json" not in json_paths[0].name
-    ):
-        raise AppException(DEPRICATED_DOCUMENT_VIDEO_MESSAGE)
-
-    input_dir, output_dir = _change_type(input_dir, output_dir)
-
-    sa_convert_project_type(input_dir, output_dir)
-
-
-@Tracker
-def convert_json_version(input_dir, output_dir, version=2):
-    """
-    Converts SuperAnnotate JSON versions. Newest JSON version is 2.
-
-    :param input_dir: Path to the dataset folder that you want to convert.
-    :type input_dir: Pathlike(str or Path)
-    :param output_dir: Path to the folder, where you want to have converted dataset.
-    :type output_dir: Pathlike(str or Path)
-    :param version: Output version number. Currently is either 1 or 2. Default value is 2. It will upgrade version 1 to version 2. Set 1 to degrade from version 2 to version 1.
-    :type version: int

-    :return: List of converted files
-    :rtype: list
-    """
-
-    params_info = [
-        (input_dir, "input_dir", (str, Path)),
-        (output_dir, "output_dir", (str, Path)),
-        (version, "version", int),
-    ]
-    _passes_type_sanity(params_info)
     input_dir, output_dir = _change_type(input_dir, output_dir)

-    output_dir.mkdir(parents=True, exist_ok=True)
-    if version == 2:
-        converted_files = upgrade_json(input_dir, output_dir)
-    elif version == 1:
-        converted_files = degrade_json(input_dir, output_dir)
-    else:
-        raise AppException("'version' is either 1 or 2.")
-
-    return converted_files
+    sa_convert_project_type(input_dir, output_dir, convert_to)
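
For orientation, a minimal usage sketch of the new signature: the target type is now passed explicitly via convert_to instead of being inferred from the '___pixel.json' / '___objects.json' file-name postfixes. The folder paths below are hypothetical:

from pathlib import Path

from superannotate import convert_project_type

# Hypothetical local folders; str or Path both satisfy the Union[str, Path] annotation.
input_dir = Path("~/annotations/my_vector_project").expanduser()
output_dir = Path("~/annotations/my_pixel_project").expanduser()

# convert_to is restricted to "Vector" or "Pixel" by the Literal annotation.
convert_project_type(input_dir, output_dir, convert_to="Pixel")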

src/superannotate/lib/app/input_converters/sa_conversion.py

Lines changed: 26 additions & 165 deletions
@@ -2,6 +2,7 @@
 import json
 import logging
 import shutil
+from pathlib import Path

 import cv2
 import numpy as np
@@ -23,12 +24,13 @@ def from_pixel_to_vector(json_paths, output_dir):
     img_names = []

     for json_path in json_paths:
-        file_name = str(json_path.name).replace("___pixel.json", "___objects.json")
-
-        mask_name = str(json_path).replace("___pixel.json", "___save.png")
+        file_name = str(json_path)
+        pixel_postfix = "___pixel.json"
+        postfix = pixel_postfix if file_name.endswith(pixel_postfix) else ".json"
+        mask_name = file_name.replace(postfix, "___save.png")
         img = cv2.imread(mask_name)
         H, W, _ = img.shape
-        sa_json = json.load(open(json_path))
+        sa_json = json.load(open(file_name))
         instances = sa_json["instances"]
         new_instances = []
         global_idx = itertools.count()
@@ -78,21 +80,24 @@ def from_pixel_to_vector(json_paths, output_dir):
             new_instances.append(temp)

         sa_json["instances"] = new_instances
-        write_to_json(output_dir / file_name, sa_json)
-        img_names.append(file_name.replace("___objects.json", ""))
+        write_to_json(
+            str(output_dir / Path(file_name).name.replace(postfix, ".json")), sa_json
+        )
+        img_names.append(file_name.replace(postfix, ""))
     return img_names


 def from_vector_to_pixel(json_paths, output_dir):
     img_names = []
     for json_path in json_paths:
-        file_name = str(json_path.name).replace("___objects.json", "___pixel.json")
-
-        img_name = str(json_path).replace("___objects.json", "")
+        file_name = str(json_path)
+        vector_postfix = "___objects.json"
+        postfix = vector_postfix if file_name.endswith(vector_postfix) else ".json"
+        img_name = file_name.replace(postfix, "")
         img = cv2.imread(img_name)
         H, W, _ = img.shape

-        sa_json = json.load(open(json_path))
+        sa_json = json.load(open(file_name))
         instances = sa_json["instances"]
         mask = np.zeros((H, W, 4))

@@ -155,16 +160,18 @@ def from_vector_to_pixel(json_paths, output_dir):
             instance["parts"] = parts
             sa_instances.append(instance.copy())

-        mask_name = file_name.replace("___pixel.json", "___save.png")
-        cv2.imwrite(str(output_dir.joinpath(mask_name)), mask)
+        mask_name = file_name.replace(postfix, "___save.png")
+        cv2.imwrite(str(output_dir.joinpath(Path(mask_name).name)), mask)

         sa_json["instances"] = sa_instances
-        write_to_json(output_dir / file_name, sa_json)
-        img_names.append(file_name.replace("___pixel.json", ""))
+        write_to_json(
+            str(output_dir / Path(file_name).name.replace(postfix, ".json")), sa_json
+        )
+        img_names.append(img_name.replace(".json", ""))
     return img_names


-def sa_convert_project_type(input_dir, output_dir):
+def sa_convert_project_type(input_dir, output_dir, convert_to):
     json_paths = list(input_dir.glob("*.json"))

     output_dir.joinpath("classes").mkdir(parents=True)
@@ -173,158 +180,12 @@ def sa_convert_project_type(input_dir, output_dir):
         output_dir.joinpath("classes", "classes.json"),
     )

-    if "___pixel.json" in json_paths[0].name:
+    if convert_to == "Vector":
         img_names = from_pixel_to_vector(json_paths, output_dir)
-    elif "___objects.json" in json_paths[0].name:
+    elif convert_to == "Pixel":
         img_names = from_vector_to_pixel(json_paths, output_dir)
-    elif ".json" in json_paths[0].name:
-        raise AppException(DEPRICATED_DOCUMENT_VIDEO_MESSAGE)
     else:
-        raise AppException(
-            "'input_dir' should contain JSON files with '[IMAGE_NAME]___objects.json' or '[IMAGE_NAME]___pixel.json' names structure.",
-        )
+        raise AppException(DEPRICATED_DOCUMENT_VIDEO_MESSAGE)

     for img_name in img_names:
-        copy_file(input_dir.joinpath(img_name), output_dir.joinpath(img_name))
-
-
-def upgrade_json(input_dir, output_dir):
-    files_list = list(input_dir.glob("*.json"))
-    ptype = "Vector"
-    if "___pixel" in str(files_list[0].name):
-        ptype = "Pixel"
-
-    converted_files = []
-    failed_files = []
-    for file in files_list:
-        file_name = file.name
-        try:
-            output_json = _update_json_format(file, ptype)
-            converted_files.append(file_name)
-            write_to_json(output_dir / file_name, output_json)
-        except Exception as e:
-            logger.debug(str(e), exc_info=True)
-            failed_files.append(file_name)
-
-    return converted_files
-
-
-def degrade_json(input_dir, output_dir):
-    files_list = list(input_dir.glob("*.json"))
-
-    converted_files = []
-    failed_files = []
-    for file in files_list:
-        file_name = file.name
-        try:
-            output_json = _degrade_json_format(file)
-            converted_files.append(output_dir / file_name)
-            write_to_json(output_dir / file_name, output_json)
-        except Exception as e:
-            failed_files.append(file_name)
-
-    return converted_files
-
-
-def _update_json_format(old_json_path, project_type):
-    old_json_data = json.load(open(old_json_path))
-    new_json_data = {"metadata": {}, "instances": [], "tags": [], "comments": []}
-
-    meta_keys = [
-        "name",
-        "width",
-        "height",
-        "status",
-        "pinned",
-        "isPredicted",
-        "projectId",
-        "annotatorEmail",
-        "qaEmail",
-    ]
-    if project_type == "Pixel":
-        meta_keys.append("isSegmented")
-
-    new_json_data["metadata"] = dict.fromkeys(meta_keys)
-
-    suffix = "___objects.json" if project_type == "Vector" else "___pixel.json"
-    image_name = str(old_json_path.name).split(suffix)[0]
-    metadata = new_json_data["metadata"]
-    metadata["name"] = image_name
-
-    for item in old_json_data:
-        object_type = item.get("type")
-        if object_type == "meta":
-            meta_name = item["name"]
-            if meta_name == "imageAttributes":
-                metadata["height"] = item.get("height")
-                metadata["width"] = item.get("width")
-                metadata["status"] = item.get("status")
-                metadata["pinned"] = item.get("pinned")
-            if meta_name == "lastAction":
-                metadata["lastAction"] = dict.fromkeys(["email", "timestamp"])
-                metadata["lastAction"]["email"] = item.get("userId")
-                metadata["lastAction"]["timestamp"] = item.get("timestamp")
-        elif object_type == "tag":
-            new_json_data["tags"].append(item.get("name"))
-        elif object_type == "comment":
-            item.pop("type")
-            item["correspondence"] = item["comments"]
-            for comment in item["correspondence"]:
-                comment["email"] = comment["id"]
-                comment.pop("id")
-            item.pop("comments")
-            new_json_data["comments"].append(item)
-        else:
-            new_json_data["instances"].append(item)
-
-    return new_json_data
-
-
-def _degrade_json_format(new_json_path):
-    sa_loader = []
-    new_json_data = json.load(open(new_json_path))
-
-    meta = {"type": "meta", "name": "imageAttributes"}
-    meta_keys = ["height", "width", "status", "pinned"]
-    for meta_key in meta_keys:
-        if meta_key in new_json_data["metadata"]:
-            meta[meta_key] = new_json_data["metadata"][meta_key]
-    sa_loader.append(meta)
-
-    if "lastAction" in new_json_data["metadata"]:
-        meta = {
-            "type": "meta",
-            "name": "lastAction",
-            "userId": new_json_data["metadata"]["lastAction"]["email"],
-            "timestamp": new_json_data["metadata"]["lastAction"]["timestamp"],
-        }
-        sa_loader.append(meta)
-
-    for item in new_json_data["instances"]:
-        sa_loader.append(item)
-
-    for item in new_json_data["comments"]:
-        comments = []
-        for item2 in item["correspondence"]:
-            comments.append({"text": item2["text"], "id": item2["email"]})
-        item["comments"] = comments
-        item["createdAt"] = item["correspondence"][0]["timestamp"]
-        item["createdBy"] = {
-            "email": item["correspondence"][0]["email"],
-            "role": item["correspondence"][0]["role"],
-        }
-        item["updatedAt"] = item["correspondence"][-1]["timestamp"]
-        item["updatedBy"] = {
-            "email": item["correspondence"][-1]["email"],
-            "role": item["correspondence"][-1]["role"],
-        }
-        item.pop("correspondence")
-        item["type"] = "comment"
-        item["comments"] = comments
-        sa_loader.append(item)
-
-    for item in new_json_data["tags"]:
-        tag = {"type": "tag", "name": item}
-        sa_loader.append(tag)
-
-    return sa_loader
+        copy_file(img_name, output_dir / Path(img_name).name)
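
The recurring pattern in the rewritten converters is the postfix handling: a file may end in '___pixel.json', '___objects.json', or plain '.json', and whichever postfix matches is stripped to derive the image name and the output JSON name. A standalone sketch of that logic (helper name and sample paths are illustrative, not taken from the library):

from pathlib import Path

def resolve_names(json_path, type_postfix):
    # Mirrors the postfix logic above: fall back to ".json" when the
    # type-specific postfix ("___pixel.json" or "___objects.json") is absent.
    file_name = str(json_path)
    postfix = type_postfix if file_name.endswith(type_postfix) else ".json"
    image_name = file_name.replace(postfix, "")
    output_json_name = Path(file_name).name.replace(postfix, ".json")
    return image_name, output_json_name

# Both naming schemes resolve to the same image name.
print(resolve_names("cat.jpg___pixel.json", "___pixel.json"))  # ('cat.jpg', 'cat.jpg.json')
print(resolve_names("cat.jpg.json", "___pixel.json"))          # ('cat.jpg', 'cat.jpg.json')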
