 from openeo.rest._datacube import (
     THIS,
     UDF,
+    _ensure_save_result,
     _ProcessGraphAbstraction,
     build_child_callback,
 )
@@ -37,6 +38,8 @@ class VectorCube(_ProcessGraphAbstraction):
     A geometry is specified in a 'coordinate reference system'. https://www.w3.org/TR/sdw-bp/#dfn-coordinate-reference-system-(crs)
     """
 
+    _DEFAULT_VECTOR_FORMAT = "GeoJSON"
+
     def __init__(self, graph: PGNode, connection: Connection, metadata: Optional[CubeMetadata] = None):
         super().__init__(pgnode=graph, connection=connection)
         self.metadata = metadata
@@ -195,38 +198,6 @@ def save_result(self, format: Union[str, None] = "GeoJSON", options: dict = None
             },
         )
 
-    def _ensure_save_result(
-        self,
-        format: Optional[str] = None,
-        options: Optional[dict] = None,
-    ) -> VectorCube:
-        """
-        Make sure there is a (final) `save_result` node in the process graph.
-        If there is already one: check if it is consistent with the given format/options (if any)
-        and add a new one otherwise.
-
-        :param format: (optional) desired `save_result` file format
-        :param options: (optional) desired `save_result` file format parameters
-        :return:
-        """
-        # TODO #401 Unify with DataCube._ensure_save_result and move to generic data cube parent class
-        result_node = self.result_node()
-        if result_node.process_id == "save_result":
-            # There is already a `save_result` node:
-            # check if it is consistent with given format/options (if any)
-            args = result_node.arguments
-            if format is not None and format.lower() != args["format"].lower():
-                raise ValueError(f"Existing `save_result` node with different format {args['format']!r} != {format!r}")
-            if options is not None and options != args["options"]:
-                raise ValueError(
-                    f"Existing `save_result` node with different options {args['options']!r} != {options!r}"
-                )
-            cube = self
-        else:
-            # No `save_result` node yet: automatically add it.
-            cube = self.save_result(format=format or "GeoJSON", options=options)
-        return cube
-
     def execute(self, *, validate: Optional[bool] = None) -> dict:
         """Executes the process graph."""
         return self._connection.execute(self.flat_graph(), validate=validate)
@@ -255,11 +226,15 @@ def download(
             When not specified explicitly, output format is guessed from output file extension.
 
         """
-        # TODO #401 make outputfile optional (See DataCube.download)
-        # TODO #401/#449 don't guess/override format if there is already a save_result with format?
-        if format is None and outputfile:
-            format = guess_format(outputfile)
-        cube = self._ensure_save_result(format=format, options=options)
+        # TODO #278 centralize download/create_job/execute_job logic in DataCube, VectorCube, MlModel, ...
+        cube = _ensure_save_result(
+            cube=self,
+            format=format,
+            options=options,
+            weak_format=guess_format(outputfile) if outputfile else None,
+            default_format=self._DEFAULT_VECTOR_FORMAT,
+            method="VectorCube.download()",
+        )
         return self._connection.download(cube.flat_graph(), outputfile=outputfile, validate=validate)
 
     def execute_batch(
@@ -291,11 +266,15 @@ def execute_batch(
         .. versionchanged:: 0.21.0
             When not specified explicitly, output format is guessed from output file extension.
         """
-        if out_format is None and outputfile:
-            # TODO #401/#449 don't guess/override format if there is already a save_result with format?
-            out_format = guess_format(outputfile)
-
-        job = self.create_job(out_format, job_options=job_options, validate=validate, **format_options)
+        cube = _ensure_save_result(
+            cube=self,
+            format=out_format,
+            options=format_options,
+            weak_format=guess_format(outputfile) if outputfile else None,
+            default_format=self._DEFAULT_VECTOR_FORMAT,
+            method="VectorCube.execute_batch()",
+        )
+        job = cube.create_job(job_options=job_options, validate=validate)
         return job.run_synchronous(
             # TODO #135 support multi file result sets too
             outputfile=outputfile,
@@ -331,8 +310,14 @@ def create_job(
         :return: Created job.
         """
         # TODO: avoid using all kwargs as format_options
-        # TODO: centralize `create_job` for `DataCube`, `VectorCube`, `MlModel`, ...
-        cube = self._ensure_save_result(format=out_format, options=format_options or None)
+        # TODO #278 centralize download/create_job/execute_job logic in DataCube, VectorCube, MlModel, ...
+        cube = _ensure_save_result(
+            cube=self,
+            format=out_format,
+            options=format_options or None,
+            default_format=self._DEFAULT_VECTOR_FORMAT,
+            method="VectorCube.create_job()",
+        )
         return self._connection.create_job(
             process_graph=cube.flat_graph(),
             title=title,
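
Note: the new call sites above all delegate to the shared `_ensure_save_result()` helper imported from `openeo.rest._datacube`, passing an explicit `format`, a `weak_format` guessed from the output file extension (where applicable) and the class-level `_DEFAULT_VECTOR_FORMAT` as fallback. A minimal sketch of the intended semantics, assuming the shared helper mirrors the removed `VectorCube._ensure_save_result()` logic (the actual implementation in `_datacube.py` may differ in details such as error/warning messages):

from typing import Optional

def _ensure_save_result(
    cube,
    *,
    format: Optional[str] = None,
    options: Optional[dict] = None,
    weak_format: Optional[str] = None,
    default_format: str,
    method: str,
):
    # Sketch only: `method` is presumably just used to build user-facing messages.
    result_node = cube.result_node()
    if result_node.process_id == "save_result":
        # There is already a `save_result` node: explicit format/options must be consistent with it.
        args = result_node.arguments
        if format is not None and format.lower() != args["format"].lower():
            raise ValueError(f"Existing `save_result` node with different format {args['format']!r} != {format!r} (in {method})")
        if options is not None and options != args["options"]:
            raise ValueError(f"Existing `save_result` node with different options {args['options']!r} != {options!r} (in {method})")
        return cube
    # No `save_result` node yet: add one, preferring explicit format, then guessed (weak) format, then the default.
    return cube.save_result(format=format or weak_format or default_format, options=options)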