Skip to content

Commit 1be221c

Browse files
committed
removes batch as arg - acts always but only on dimension data arrays
1 parent 76dd268 commit 1be221c

File tree

1 file changed

+9
-36
lines changed

1 file changed

+9
-36
lines changed

xarray/backends/pydap_.py

Lines changed: 9 additions & 36 deletions
Original file line numberDiff line numberDiff line change
@@ -36,10 +36,8 @@
3636

3737

3838
class PydapArrayWrapper(BackendArray):
39-
def __init__(self, array, batch=None, checksums=True):
39+
def __init__(self, array, checksums=True):
4040
self.array = array
41-
self._batch = batch
42-
self._checksums = checksums
4341

4442
@property
4543
def shape(self) -> tuple[int, ...]:
@@ -55,19 +53,11 @@ def __getitem__(self, key):
5553
)
5654

5755
def _getitem(self, key):
58-
if self._batch and hasattr(self.array, "dataset"):
59-
# these are both True only for pydap>3.5.5
60-
from pydap.client import data_check, get_batch_data
61-
62-
dataset = self.array.dataset
63-
get_batch_data(self.array, checksums=self._checksums, key=key)
64-
result = data_check(np.asarray(dataset[self.array.id].data), key)
65-
else:
66-
result = robust_getitem(self.array, key, catch=ValueError)
67-
result = np.asarray(result.data)
68-
axis = tuple(n for n, k in enumerate(key) if isinstance(k, integer_types))
69-
if result.ndim + len(axis) != self.array.ndim and axis:
70-
result = np.squeeze(result, axis)
56+
result = robust_getitem(self.array, key, catch=ValueError)
57+
result = np.asarray(result.data)
58+
axis = tuple(n for n, k in enumerate(key) if isinstance(k, integer_types))
59+
if result.ndim + len(axis) != self.array.ndim and axis:
60+
result = np.squeeze(result, axis)
7161
return result
7262

7363

@@ -95,7 +85,6 @@ def __init__(
9585
dataset,
9686
group=None,
9787
session=None,
98-
batch=None,
9988
protocol=None,
10089
checksums=True,
10190
):
@@ -108,7 +97,6 @@ def __init__(
10897
"""
10998
self.dataset = dataset
11099
self.group = group
111-
self._batch = batch
112100
self._protocol = protocol
113101
self._checksums = checksums # true by default
114102

@@ -123,7 +111,6 @@ def open(
123111
timeout=None,
124112
verify=None,
125113
user_charset=None,
126-
batch=None,
127114
checksums=True,
128115
):
129116
from pydap.client import open_url
@@ -162,8 +149,6 @@ def open(
162149
args["protocol"] = "dap2"
163150
elif url.startswith("dap4"):
164151
args["protocol"] = "dap4"
165-
if batch:
166-
args["batch"] = batch
167152
return cls(**args)
168153

169154
def open_store_variable(self, var):
@@ -180,19 +165,13 @@ def open_store_variable(self, var):
180165
and var.name in dimensions
181166
and hasattr(var, "dataset") # only True for pydap>3.5.5
182167
):
183-
if not var.dataset._batch_mode:
184-
# for dap4, always batch all dimensions at once
185-
var.dataset.enable_batch_mode()
168+
var.dataset.enable_batch_mode()
186169
data_array = self._get_data_array(var)
187170
data = indexing.LazilyIndexedArray(data_array)
188-
if not self._batch and var.dataset._batch_mode:
189-
# if `batch=False`, restore it for all other variables
190-
var.dataset.disable_batch_mode()
171+
var.dataset.disable_batch_mode()
191172
else:
192173
# all non-dimension variables
193-
data = indexing.LazilyIndexedArray(
194-
PydapArrayWrapper(var, self._batch, self._checksums)
195-
)
174+
data = indexing.LazilyIndexedArray(PydapArrayWrapper(var))
196175

197176
return Variable(dimensions, data, var.attributes)
198177

@@ -311,7 +290,6 @@ def open_dataset(
311290
timeout=None,
312291
verify=None,
313292
user_charset=None,
314-
batch=None,
315293
checksums=True,
316294
) -> Dataset:
317295
store = PydapDataStore.open(
@@ -323,7 +301,6 @@ def open_dataset(
323301
timeout=timeout,
324302
verify=verify,
325303
user_charset=user_charset,
326-
batch=batch,
327304
checksums=checksums,
328305
)
329306
store_entrypoint = StoreBackendEntrypoint()
@@ -357,7 +334,6 @@ def open_datatree(
357334
timeout=None,
358335
verify=None,
359336
user_charset=None,
360-
batch=None,
361337
checksums=True,
362338
) -> DataTree:
363339
groups_dict = self.open_groups_as_dict(
@@ -375,7 +351,6 @@ def open_datatree(
375351
timeout=timeout,
376352
verify=application,
377353
user_charset=user_charset,
378-
batch=batch,
379354
checksums=checksums,
380355
)
381356

@@ -398,7 +373,6 @@ def open_groups_as_dict(
398373
timeout=None,
399374
verify=None,
400375
user_charset=None,
401-
batch=None,
402376
checksums=True,
403377
) -> dict[str, Dataset]:
404378
from xarray.core.treenode import NodePath
@@ -411,7 +385,6 @@ def open_groups_as_dict(
411385
timeout=timeout,
412386
verify=verify,
413387
user_charset=user_charset,
414-
batch=batch,
415388
checksums=checksums,
416389
)
417390

0 commit comments

Comments
 (0)