 from lib.core.serviceproviders import BaseAnnotationService
 from lib.infrastructure.stream_data_handler import StreamedAnnotations
 from pydantic import parse_obj_as
+from superannotate.lib.infrastructure.services.http_client import AIOHttpSession
 
 logger = logging.getLogger("sa")
 
 
 class AnnotationService(BaseAnnotationService):
-    ASSETS_PROVIDER_VERSION = "v2.01"
+    ASSETS_PROVIDER_VERSION = "v3.01"
     DEFAULT_CHUNK_SIZE = 5000
 
     URL_GET_ANNOTATIONS = "items/annotations/download"
@@ -71,13 +72,12 @@ async def _sync_large_annotation(self, team_id, project_id, item_id):
             self.assets_provider_url,
             self.URL_START_FILE_SYNC.format(item_id=item_id),
         )
-        async with aiohttp.ClientSession(
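+        # The SDK's AIOHttpSession replaces the bare aiohttp.ClientSession here;
+        # HTTP errors are raised explicitly on the response below rather than via
+        # raise_for_status=True on the session.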
+        async with AIOHttpSession(
             connector=aiohttp.TCPConnector(ssl=False),
             headers=self.client.default_headers,
-            raise_for_status=True,
         ) as session:
-            await session.post(sync_url, params=sync_params)
-
+            _response = await session.request("post", sync_url, params=sync_params)
+            _response.raise_for_status()
         sync_params.pop("current_source")
         sync_params.pop("desired_source")
 
@@ -115,12 +115,12 @@ async def get_big_annotation(
             team_id=project.team_id, project_id=project.id, item_id=item.id
         )
 
-        async with aiohttp.ClientSession(
+        async with AIOHttpSession(
             connector=aiohttp.TCPConnector(ssl=False),
             headers=self.client.default_headers,
-            raise_for_status=True,
         ) as session:
-            start_response = await session.post(url, params=query_params)
+            start_response = await session.request("post", url, params=query_params)
+            start_response.raise_for_status()
             large_annotation = await start_response.json()
 
             reporter.update_progress()
@@ -162,8 +162,8 @@ def get_upload_chunks(
         response = self.client.request(
             url=urljoin(self.assets_provider_url, self.URL_CLASSIFY_ITEM_SIZE),
             method="POST",
-            params={"project_id": project.id, "limit": len(item_ids)},
-            data={"item_ids": item_ids},
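+            # project_id is now sent in the request body instead of the query string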
+            params={"limit": len(item_ids)},
+            data={"project_id": project.id, "item_ids": item_ids},
         )
         if not response.ok:
             raise AppException(response.error)
@@ -198,12 +198,12 @@ async def download_big_annotation(
             team_id=project.team_id, project_id=project.id, item_id=item_id
         )
 
-        async with aiohttp.ClientSession(
+        async with AIOHttpSession(
            connector=aiohttp.TCPConnector(ssl=False),
            headers=self.client.default_headers,
-            raise_for_status=True,
        ) as session:
-            start_response = await session.post(url, params=query_params)
+            start_response = await session.request("post", url, params=query_params)
+            start_response.raise_for_status()
             res = await start_response.json()
             Path(download_path).mkdir(exist_ok=True, parents=True)
 
@@ -246,40 +246,45 @@ async def upload_small_annotations(
         self,
         project: entities.ProjectEntity,
         folder: entities.FolderEntity,
-        items_name_file_map: Dict[str, io.StringIO],
+        items_name_data_map: Dict[str, dict],
     ) -> UploadAnnotationsResponse:
         url = urljoin(
             self.assets_provider_url,
             (
-                f"{self.URL_UPLOAD_ANNOTATIONS}?{'&'.join(f'image_names[]={item_name}' for item_name in items_name_file_map.keys())}"
+                f"{self.URL_UPLOAD_ANNOTATIONS}?{'&'.join(f'image_names[]={item_name}' for item_name in items_name_data_map.keys())}"
             ),
         )
 
         headers = copy.copy(self.client.default_headers)
         del headers["Content-Type"]
-        async with aiohttp.ClientSession(
+        async with AIOHttpSession(
             headers=headers,
             connector=aiohttp.TCPConnector(ssl=False),
-            raise_for_status=True,
         ) as session:
-            data = aiohttp.FormData(quote_fields=False)
-            for key, file in items_name_file_map.items():
-                file.seek(0)
-                data.add_field(
+            form_data = aiohttp.FormData(
+                quote_fields=False,
+            )
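+            # Annotations now arrive as plain dicts; each one is dumped to an
+            # in-memory JSON buffer before being attached to the multipart form.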
+            tmp = {}
+            for name, data in items_name_data_map.items():
+                tmp[name] = io.StringIO()
+                json.dump({"data": data}, tmp[name], allow_nan=False)
+                tmp[name].seek(0)
+
+            for key, data in tmp.items():
+                form_data.add_field(
                     key,
-                    bytes(file.read(), "ascii"),
+                    data,
                     filename=key,
                     content_type="application/json",
                 )
 
-            _response = await session.post(
-                url,
-                params={
-                    "team_id": project.team_id,
-                    "project_id": project.id,
-                    "folder_id": folder.id,
-                },
-                data=data,
+            params = {
+                "team_id": project.team_id,
+                "project_id": project.id,
+                "folder_id": folder.id,
+            }
+            _response = await session.request(
+                "post", url, params=params, data=form_data
             )
             if not _response.ok:
                 logger.debug(f"Status code {str(_response.status)}")
@@ -301,23 +306,20 @@ async def upload_big_annotation(
         data: io.StringIO,
         chunk_size: int,
     ) -> bool:
-        async with aiohttp.ClientSession(
+        async with AIOHttpSession(
             connector=aiohttp.TCPConnector(ssl=False),
             headers=self.client.default_headers,
-            raise_for_status=True,
         ) as session:
             params = {
                 "team_id": project.team_id,
                 "project_id": project.id,
                 "folder_id": folder.id,
             }
-            start_response = await session.post(
-                urljoin(
-                    self.assets_provider_url,
-                    self.URL_START_FILE_UPLOAD_PROCESS.format(item_id=item_id),
-                ),
-                params=params,
+            url = urljoin(
+                self.assets_provider_url,
+                self.URL_START_FILE_UPLOAD_PROCESS.format(item_id=item_id),
             )
+            start_response = await session.request("post", url, params=params)
             if not start_response.ok:
                 raise AppException(str(await start_response.text()))
             process_info = await start_response.json()
@@ -331,7 +333,8 @@ async def upload_big_annotation(
             params["chunk_id"] = chunk_id
             if chunk:
                 data_sent = True
-                response = await session.post(
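+                    # each chunk of the annotation is uploaded as a separate
+                    # "send part" request before the upload is finalized and synced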
+                response = await session.request(
+                    "post",
                     urljoin(
                         self.assets_provider_url,
                         self.URL_START_FILE_SEND_PART.format(item_id=item_id),
@@ -348,7 +351,8 @@ async def upload_big_annotation(
                 if len(chunk) < chunk_size:
                     break
             del params["chunk_id"]
-            response = await session.post(
+            response = await session.request(
+                "post",
                 urljoin(
                     self.assets_provider_url,
                     self.URL_START_FILE_SEND_FINISH.format(item_id=item_id),
@@ -359,7 +363,8 @@ async def upload_big_annotation(
             if not response.ok:
                 raise AppException(str(await response.text()))
             del params["path"]
-            response = await session.post(
+            response = await session.request(
+                "post",
                 urljoin(
                     self.assets_provider_url,
                     self.URL_START_FILE_SYNC.format(item_id=item_id),
@@ -370,7 +375,8 @@ async def upload_big_annotation(
             if not response.ok:
                 raise AppException(str(await response.text()))
             while True:
-                response = await session.get(
+                response = await session.request(
+                    "get",
                     urljoin(
                         self.assets_provider_url,
                         self.URL_START_FILE_SYNC_STATUS.format(item_id=item_id),