Commit 905bdbd

RDBC-811 Make load_starting_with_into_stream work properly
1 parent a8abeef commit 905bdbd

3 files changed, +95 -50 lines changed

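This commit makes load_starting_with_into_stream actually return data instead of leaving a "todo: stream" stub: the stream handling is pulled out of the old private __load_starting_with_internal, a dedicated _load_starting_with_into_stream_internal serializes the GetDocumentsResult to JSON bytes via a new to_json method, and the advanced wrapper no longer takes an output: bytes argument but returns the bytes directly. A rough call-site sketch (assumes an already-initialized DocumentStore named store; the method names follow the diff below):

import json

with store.open_session() as session:
    # Before this commit the caller passed an `output: bytes` buffer that was never filled;
    # now the serialized GetDocumentsResult comes back as UTF-8 encoded JSON bytes.
    stream = session.advanced.load_starting_with_into_stream("employees/")
    results = json.loads(stream.decode("utf-8"))["Results"]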

ravendb/documents/commands/results.py

Lines changed: 11 additions & 1 deletion
@@ -1,5 +1,5 @@
 from __future__ import annotations
-from typing import Union, Optional, List, Dict
+from typing import Union, Optional, List, Dict, Any


 class GetDocumentResult:
@@ -39,3 +39,13 @@ def from_json(cls, json_dict: dict) -> GetDocumentsResult:
             json_dict.get("CompareExchangeValueIncludes", None),
             json_dict.get("NextPageStart", None),
         )
+
+    def to_json(self) -> Dict[str, Any]:
+        return {
+            "Includes": self.includes,
+            "Results": self.results,
+            "CounterIncludes": self.counter_includes,
+            "TimeSeriesIncludes": self.time_series_includes,
+            "CompareExchangeValueIncludes": self.compare_exchange_includes,
+            "NextPageStart": self.next_page_start,
+        }
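The new to_json emits the same keys that from_json consumes, so a GetDocumentsResult can be serialized into the byte stream and re-hydrated by the caller. A minimal round-trip sketch, assuming the constructor stores the parsed fields unchanged (the sample payload is invented):

from ravendb.documents.commands.results import GetDocumentsResult

raw = {
    "Includes": {},
    "Results": [{"first_name": "Aviv"}],
    "CounterIncludes": None,
    "TimeSeriesIncludes": None,
    "CompareExchangeValueIncludes": None,
    "NextPageStart": None,
}
result = GetDocumentsResult.from_json(raw)            # parse the server-shaped dict
assert result.to_json()["Results"] == raw["Results"]  # serialize it back out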

ravendb/documents/session/document_session.py

Lines changed: 44 additions & 49 deletions
@@ -375,16 +375,30 @@ def load_starting_with(
         start_after: Optional[str] = None,
     ) -> List[_T]:
         load_starting_with_operation = LoadStartingWithOperation(self)
-        self.__load_starting_with_internal(
-            id_prefix, load_starting_with_operation, None, matches, start, page_size, exclude, start_after
+        self._load_starting_with_internal(
+            id_prefix, load_starting_with_operation, matches, start, page_size, exclude, start_after
         )
         return load_starting_with_operation.get_documents(object_type)

-    def __load_starting_with_internal(
+    def load_starting_with_into_stream(
+        self,
+        id_prefix: str,
+        matches: str = None,
+        start: int = 0,
+        page_size: int = 25,
+        exclude: str = None,
+        start_after: str = None,
+    ) -> bytes:
+        if id_prefix is None:
+            raise ValueError("Arg 'id_prefix' cannot be None.")
+        return self._load_starting_with_into_stream_internal(
+            id_prefix, LoadStartingWithOperation(self), matches, start, page_size, exclude, start_after
+        )
+
+    def _load_starting_with_internal(
         self,
         id_prefix: str,
         operation: LoadStartingWithOperation,
-        stream,
         matches: str,
         start: int,
         page_size: int,
@@ -395,12 +409,31 @@ def __load_starting_with_internal(
         command = operation.create_request()
         if command:
             self._request_executor.execute_command(command, self.session_info)
-            if stream:
-                pass  # todo: stream
-            else:
-                operation.set_result(command.result)
+            operation.set_result(command.result)
         return command

+    def _load_starting_with_into_stream_internal(
+        self,
+        id_prefix: str,
+        operation: LoadStartingWithOperation,
+        matches: str,
+        start: int,
+        page_size: int,
+        exclude: str,
+        start_after: str,
+    ) -> bytes:
+        operation.with_start_with(id_prefix, matches, start, page_size, exclude, start_after)
+        command = operation.create_request()
+        bytes_result = None
+        if command:
+            self.request_executor.execute_command(command, self.session_info)
+            try:
+                result = command.result
+                bytes_result = json.dumps(result.to_json()).encode("utf-8")
+            except Exception as e:
+                raise RuntimeError("Unable to serialize returned value into stream") from e
+        return bytes_result
+
     def document_query_from_index_type(self, index_type: Type[_TIndex], object_type: Type[_T]) -> DocumentQuery[_T]:
         try:
             index = Utils.try_get_new_instance(index_type)
@@ -783,32 +816,6 @@ def wait_for_indexes_after_save_changes(

         index_options.wait_for_indexes = True

-    def __load_starting_with_internal(
-        self,
-        id_prefix: str,
-        operation: LoadStartingWithOperation,
-        stream: Union[None, bytes],
-        matches: str,
-        start: int,
-        page_size: int,
-        exclude: str,
-        start_after: str,
-    ) -> GetDocumentsCommand:
-        operation.with_start_with(id_prefix, matches, start, page_size, exclude, start_after)
-        command = operation.create_request()
-        if command is not None:
-            self._session._request_executor.execute(command, self._session.session_info)
-            if stream:
-                try:
-                    result = command.result
-                    stream_to_dict = json.loads(stream.decode("utf-8"))
-                    result.__dict__.update(stream_to_dict)
-                except IOError as e:
-                    raise RuntimeError(f"Unable to serialize returned value into stream {e.args[0]}", e)
-            else:
-                operation.set_result(command.result)
-        return command
-
     def load_starting_with(
         self,
         id_prefix: str,
@@ -826,26 +833,14 @@ def load_starting_with(
     def load_starting_with_into_stream(
         self,
         id_prefix: str,
-        output: bytes,
         matches: str = None,
         start: int = 0,
         page_size: int = 25,
         exclude: str = None,
         start_after: str = None,
-    ):
-        if not output:
-            raise ValueError("Output cannot be None")
-        if not id_prefix:
-            raise ValueError("Id prefix cannot be None")
-        self.__load_starting_with_internal(
-            id_prefix,
-            LoadStartingWithOperation(self._session),
-            output,
-            matches,
-            start,
-            page_size,
-            exclude,
-            start_after,
+    ) -> bytes:
+        return self._session.load_starting_with_into_stream(
+            id_prefix, matches, start, page_size, exclude, start_after
         )

     def load_into_stream(self, keys: List[str], output: bytes) -> None:

Lines changed: 40 additions & 0 deletions
@@ -0,0 +1,40 @@
+import json
+
+from ravendb import DocumentStore
+from ravendb.documents.commands.results import GetDocumentsResult
+from ravendb.infrastructure.orders import Employee
+from ravendb.tests.test_base import TestBase
+
+
+class TestLoadIntoStream(TestBase):
+    def setUp(self):
+        super().setUp()
+
+    @staticmethod
+    def insert_data(store: DocumentStore):
+        with store.open_session() as session:
+
+            def _insert_employee(name: str = None):
+                employee = Employee(first_name=name)
+                session.store(employee)
+
+            _insert_employee("Aviv")
+            _insert_employee("Iftah")
+            _insert_employee("Tal")
+            _insert_employee("Maxim")
+            _insert_employee("Karmel")
+            _insert_employee("Grisha")
+            _insert_employee("Michael")
+            session.save_changes()
+
+    def test_can_load_starting_with_into_stream(self):
+        self.insert_data(self.store)
+        with self.store.open_session() as session:
+            stream = session.advanced.load_starting_with_into_stream("employees/")
+            json_node = json.loads(stream.decode("utf-8"))
+            result = GetDocumentsResult.from_json(json_node)
+            self.assertEqual(7, len(result.results))
+            names = ["Aviv", "Iftah", "Tal", "Maxim", "Karmel", "Grisha", "Michael"]
+            for name_from_results in [result["first_name"] for result in result.results]:
+                self.assertIn(name_from_results, names)
+                names.remove(name_from_results)
