Auto-generated code for main #2768

Merged (1 commit) on Jan 29, 2025
652 changes: 434 additions & 218 deletions elasticsearch/_async/client/__init__.py

Large diffs are not rendered by default.

6 changes: 6 additions & 0 deletions elasticsearch/_async/client/async_search.py
@@ -270,6 +270,7 @@ async def submit(
ignore_throttled: t.Optional[bool] = None,
ignore_unavailable: t.Optional[bool] = None,
indices_boost: t.Optional[t.Sequence[t.Mapping[str, float]]] = None,
keep_alive: t.Optional[t.Union[str, t.Literal[-1], t.Literal[0]]] = None,
keep_on_completion: t.Optional[bool] = None,
knn: t.Optional[
t.Union[t.Mapping[str, t.Any], t.Sequence[t.Mapping[str, t.Any]]]
@@ -383,6 +384,9 @@ async def submit(
:param ignore_unavailable: Whether specified concrete indices should be ignored
when unavailable (missing or closed)
:param indices_boost: Boosts the _score of documents from specified indices.
:param keep_alive: Specifies how long the async search needs to be available.
Ongoing async searches and any saved search results are deleted after this
period.
:param keep_on_completion: If `true`, results are stored for later retrieval
when the search completes within the `wait_for_completion_timeout`.
:param knn: Defines the approximate kNN search to run.
@@ -508,6 +512,8 @@ async def submit(
__query["ignore_throttled"] = ignore_throttled
if ignore_unavailable is not None:
__query["ignore_unavailable"] = ignore_unavailable
if keep_alive is not None:
__query["keep_alive"] = keep_alive
if keep_on_completion is not None:
__query["keep_on_completion"] = keep_on_completion
if lenient is not None:
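
The new `keep_alive` parameter on `async_search.submit` controls how long the async search and any stored results remain available before being deleted. A minimal usage sketch, assuming a reachable cluster; the endpoint URL, API key, and index name `my-index` are placeholders, not part of this change:

```python
import asyncio

from elasticsearch import AsyncElasticsearch


async def main() -> None:
    # Placeholder connection details; adjust for your cluster.
    client = AsyncElasticsearch("https://localhost:9200", api_key="...")
    try:
        # keep_alive takes a duration string such as "2d"; ongoing searches
        # and saved results are deleted once this period elapses.
        resp = await client.async_search.submit(
            index="my-index",
            query={"match_all": {}},
            keep_alive="2d",
            keep_on_completion=True,
        )
        print(resp.get("id"), resp.get("is_running"))
    finally:
        await client.close()


asyncio.run(main())
```
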
14 changes: 14 additions & 0 deletions elasticsearch/_async/client/connector.py
@@ -85,6 +85,7 @@ async def delete(
delete_sync_jobs: t.Optional[bool] = None,
error_trace: t.Optional[bool] = None,
filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None,
hard: t.Optional[bool] = None,
human: t.Optional[bool] = None,
pretty: t.Optional[bool] = None,
) -> ObjectApiResponse[t.Any]:
@@ -103,6 +104,7 @@ async def delete(
:param connector_id: The unique identifier of the connector to be deleted
:param delete_sync_jobs: A flag indicating if associated sync jobs should be
also removed. Defaults to false.
:param hard: A flag indicating if the connector should be hard deleted.
"""
if connector_id in SKIP_IN_PATH:
raise ValueError("Empty value passed for parameter 'connector_id'")
@@ -115,6 +117,8 @@
__query["error_trace"] = error_trace
if filter_path is not None:
__query["filter_path"] = filter_path
if hard is not None:
__query["hard"] = hard
if human is not None:
__query["human"] = human
if pretty is not None:
@@ -138,6 +142,7 @@ async def get(
error_trace: t.Optional[bool] = None,
filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None,
human: t.Optional[bool] = None,
include_deleted: t.Optional[bool] = None,
pretty: t.Optional[bool] = None,
) -> ObjectApiResponse[t.Any]:
"""
@@ -150,6 +155,8 @@
`<https://www.elastic.co/guide/en/elasticsearch/reference/master/get-connector-api.html>`_

:param connector_id: The unique identifier of the connector
:param include_deleted: A flag to indicate if the desired connector should be
fetched, even if it was soft-deleted.
"""
if connector_id in SKIP_IN_PATH:
raise ValueError("Empty value passed for parameter 'connector_id'")
@@ -162,6 +169,8 @@
__query["filter_path"] = filter_path
if human is not None:
__query["human"] = human
if include_deleted is not None:
__query["include_deleted"] = include_deleted
if pretty is not None:
__query["pretty"] = pretty
__headers = {"accept": "application/json"}
@@ -333,6 +342,7 @@ async def list(
filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None,
from_: t.Optional[int] = None,
human: t.Optional[bool] = None,
include_deleted: t.Optional[bool] = None,
index_name: t.Optional[t.Union[str, t.Sequence[str]]] = None,
pretty: t.Optional[bool] = None,
query: t.Optional[str] = None,
@@ -351,6 +361,8 @@
:param connector_name: A comma-separated list of connector names to fetch connector
documents for
:param from_: Starting offset (default: 0)
:param include_deleted: A flag to indicate if the desired connector should be
fetched, even if it was soft-deleted.
:param index_name: A comma-separated list of connector index names to fetch connector
documents for
:param query: A wildcard query string that filters connectors with matching name,
@@ -372,6 +384,8 @@
__query["from"] = from_
if human is not None:
__query["human"] = human
if include_deleted is not None:
__query["include_deleted"] = include_deleted
if index_name is not None:
__query["index_name"] = index_name
if pretty is not None:
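
Taken together, the new `hard` flag on `connector.delete` and the `include_deleted` flag on `connector.get` and `connector.list` make it possible to keep working with soft-deleted connectors or to purge them permanently. A hedged sketch of both flags, assuming the default delete is a soft delete (as the new `include_deleted` flag suggests); the connector ID and connection details are placeholders:

```python
import asyncio

from elasticsearch import AsyncElasticsearch


async def main() -> None:
    # Placeholder connection details; adjust for your cluster.
    client = AsyncElasticsearch("https://localhost:9200", api_key="...")
    try:
        # Assumed: the default delete leaves the connector soft-deleted;
        # hard=True would remove it permanently instead.
        await client.connector.delete(connector_id="my-connector", delete_sync_jobs=True)

        # A soft-deleted connector can still be fetched or listed by passing
        # include_deleted=True.
        connector = await client.connector.get(
            connector_id="my-connector", include_deleted=True
        )
        listing = await client.connector.list(include_deleted=True)
        print(connector.get("name"), listing.get("count"))
    finally:
        await client.close()


asyncio.run(main())
```
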
211 changes: 211 additions & 0 deletions elasticsearch/_async/client/indices.py
@@ -230,6 +230,51 @@ async def analyze(
path_parts=__path_parts,
)

@_rewrite_parameters()
@_stability_warning(Stability.EXPERIMENTAL)
async def cancel_migrate_reindex(
self,
*,
index: t.Union[str, t.Sequence[str]],
error_trace: t.Optional[bool] = None,
filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None,
human: t.Optional[bool] = None,
pretty: t.Optional[bool] = None,
) -> ObjectApiResponse[t.Any]:
"""
.. raw:: html

<p>Cancel a migration reindex operation.</p>
<p>Cancel a migration reindex attempt for a data stream or index.</p>


`<https://www.elastic.co/guide/en/elasticsearch/reference/master/migrate-data-stream.html>`_

:param index: The index or data stream name
"""
if index in SKIP_IN_PATH:
raise ValueError("Empty value passed for parameter 'index'")
__path_parts: t.Dict[str, str] = {"index": _quote(index)}
__path = f'/_migration/reindex/{__path_parts["index"]}/_cancel'
__query: t.Dict[str, t.Any] = {}
if error_trace is not None:
__query["error_trace"] = error_trace
if filter_path is not None:
__query["filter_path"] = filter_path
if human is not None:
__query["human"] = human
if pretty is not None:
__query["pretty"] = pretty
__headers = {"accept": "application/json"}
return await self.perform_request( # type: ignore[return-value]
"POST",
__path,
params=__query,
headers=__headers,
endpoint_id="indices.cancel_migrate_reindex",
path_parts=__path_parts,
)
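
A minimal sketch of calling the new cancel endpoint. Because the method is decorated with `@_stability_warning(Stability.EXPERIMENTAL)`, calls are expected to surface a stability warning; the data stream name and connection details below are placeholders:

```python
import asyncio

from elasticsearch import AsyncElasticsearch


async def main() -> None:
    # Placeholder connection details; adjust for your cluster.
    client = AsyncElasticsearch("https://localhost:9200", api_key="...")
    try:
        # POSTs to /_migration/reindex/<index>/_cancel for the named data
        # stream or index, cancelling an in-flight migration reindex.
        resp = await client.indices.cancel_migrate_reindex(index="my-data-stream")
        print(resp)
    finally:
        await client.close()


asyncio.run(main())
```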

@_rewrite_parameters()
async def clear_cache(
self,
@@ -710,6 +755,71 @@ async def create_data_stream(
path_parts=__path_parts,
)

@_rewrite_parameters(
body_name="create_from",
)
@_stability_warning(Stability.EXPERIMENTAL)
async def create_from(
self,
*,
source: str,
dest: str,
create_from: t.Optional[t.Mapping[str, t.Any]] = None,
body: t.Optional[t.Mapping[str, t.Any]] = None,
error_trace: t.Optional[bool] = None,
filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None,
human: t.Optional[bool] = None,
pretty: t.Optional[bool] = None,
) -> ObjectApiResponse[t.Any]:
"""
.. raw:: html

<p>Create an index from a source index.</p>
<p>Copy the mappings and settings from the source index to a destination index while allowing request settings and mappings to override the source values.</p>


`<https://www.elastic.co/guide/en/elasticsearch/reference/master/migrate-data-stream.html>`_

:param source: The source index or data stream name
:param dest: The destination index or data stream name
:param create_from:
"""
if source in SKIP_IN_PATH:
raise ValueError("Empty value passed for parameter 'source'")
if dest in SKIP_IN_PATH:
raise ValueError("Empty value passed for parameter 'dest'")
if create_from is None and body is None:
raise ValueError(
"Empty value passed for parameters 'create_from' and 'body', one of them should be set."
)
elif create_from is not None and body is not None:
raise ValueError("Cannot set both 'create_from' and 'body'")
__path_parts: t.Dict[str, str] = {
"source": _quote(source),
"dest": _quote(dest),
}
__path = f'/_create_from/{__path_parts["source"]}/{__path_parts["dest"]}'
__query: t.Dict[str, t.Any] = {}
if error_trace is not None:
__query["error_trace"] = error_trace
if filter_path is not None:
__query["filter_path"] = filter_path
if human is not None:
__query["human"] = human
if pretty is not None:
__query["pretty"] = pretty
__body = create_from if create_from is not None else body
__headers = {"accept": "application/json", "content-type": "application/json"}
return await self.perform_request( # type: ignore[return-value]
"PUT",
__path,
params=__query,
headers=__headers,
body=__body,
endpoint_id="indices.create_from",
path_parts=__path_parts,
)
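
The sketch below shows how the new experimental `create_from` helper might be called. An empty body copies the source's mappings and settings unchanged; any request-level overrides described in the migrate-data-stream docs would go in that mapping. Index names and connection details are placeholders:

```python
import asyncio

from elasticsearch import AsyncElasticsearch


async def main() -> None:
    # Placeholder connection details; adjust for your cluster.
    client = AsyncElasticsearch("https://localhost:9200", api_key="...")
    try:
        # PUTs to /_create_from/<source>/<dest>; an empty create_from body
        # copies mappings and settings from the source index as-is.
        resp = await client.indices.create_from(
            source="old-index",
            dest="new-index",
            create_from={},
        )
        print(resp)
    finally:
        await client.close()


asyncio.run(main())
```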

@_rewrite_parameters()
async def data_streams_stats(
self,
@@ -2587,6 +2697,51 @@ async def get_mapping(
path_parts=__path_parts,
)

@_rewrite_parameters()
@_stability_warning(Stability.EXPERIMENTAL)
async def get_migrate_reindex_status(
self,
*,
index: t.Union[str, t.Sequence[str]],
error_trace: t.Optional[bool] = None,
filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None,
human: t.Optional[bool] = None,
pretty: t.Optional[bool] = None,
) -> ObjectApiResponse[t.Any]:
"""
.. raw:: html

<p>Get the migration reindexing status.</p>
<p>Get the status of a migration reindex attempt for a data stream or index.</p>


`<https://www.elastic.co/guide/en/elasticsearch/reference/master/migrate-data-stream.html>`_

:param index: The index or data stream name.
"""
if index in SKIP_IN_PATH:
raise ValueError("Empty value passed for parameter 'index'")
__path_parts: t.Dict[str, str] = {"index": _quote(index)}
__path = f'/_migration/reindex/{__path_parts["index"]}/_status'
__query: t.Dict[str, t.Any] = {}
if error_trace is not None:
__query["error_trace"] = error_trace
if filter_path is not None:
__query["filter_path"] = filter_path
if human is not None:
__query["human"] = human
if pretty is not None:
__query["pretty"] = pretty
__headers = {"accept": "application/json"}
return await self.perform_request( # type: ignore[return-value]
"GET",
__path,
params=__query,
headers=__headers,
endpoint_id="indices.get_migrate_reindex_status",
path_parts=__path_parts,
)
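
A short sketch of polling the new status endpoint; the data stream name and connection details are placeholders:

```python
import asyncio

from elasticsearch import AsyncElasticsearch


async def main() -> None:
    # Placeholder connection details; adjust for your cluster.
    client = AsyncElasticsearch("https://localhost:9200", api_key="...")
    try:
        # GETs /_migration/reindex/<index>/_status for the named data stream
        # or index and returns the status of the migration reindex attempt.
        status = await client.indices.get_migrate_reindex_status(index="my-data-stream")
        print(status)
    finally:
        await client.close()


asyncio.run(main())
```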

@_rewrite_parameters()
async def get_settings(
self,
@@ -2756,6 +2911,62 @@ async def get_template(
path_parts=__path_parts,
)

@_rewrite_parameters(
body_name="reindex",
)
@_stability_warning(Stability.EXPERIMENTAL)
async def migrate_reindex(
self,
*,
reindex: t.Optional[t.Mapping[str, t.Any]] = None,
body: t.Optional[t.Mapping[str, t.Any]] = None,
error_trace: t.Optional[bool] = None,
filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None,
human: t.Optional[bool] = None,
pretty: t.Optional[bool] = None,
) -> ObjectApiResponse[t.Any]:
"""
.. raw:: html

<p>Reindex legacy backing indices.</p>
<p>Reindex all legacy backing indices for a data stream.
This operation occurs in a persistent task.
The persistent task ID is returned immediately and the reindexing work is completed in that task.</p>


`<https://www.elastic.co/guide/en/elasticsearch/reference/master/migrate-data-stream.html>`_

:param reindex:
"""
if reindex is None and body is None:
raise ValueError(
"Empty value passed for parameters 'reindex' and 'body', one of them should be set."
)
elif reindex is not None and body is not None:
raise ValueError("Cannot set both 'reindex' and 'body'")
__path_parts: t.Dict[str, str] = {}
__path = "/_migration/reindex"
__query: t.Dict[str, t.Any] = {}
if error_trace is not None:
__query["error_trace"] = error_trace
if filter_path is not None:
__query["filter_path"] = filter_path
if human is not None:
__query["human"] = human
if pretty is not None:
__query["pretty"] = pretty
__body = reindex if reindex is not None else body
__headers = {"accept": "application/json", "content-type": "application/json"}
return await self.perform_request( # type: ignore[return-value]
"POST",
__path,
params=__query,
headers=__headers,
body=__body,
endpoint_id="indices.migrate_reindex",
path_parts=__path_parts,
)
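
A hedged sketch of starting a migration reindex with the new method. The request body shape (`mode` and `source.index`) follows the migrate-data-stream documentation rather than this diff, so treat it as an assumption; the data stream name and connection details are placeholders:

```python
import asyncio

from elasticsearch import AsyncElasticsearch


async def main() -> None:
    # Placeholder connection details; adjust for your cluster.
    client = AsyncElasticsearch("https://localhost:9200", api_key="...")
    try:
        # Assumed body shape from the migrate-data-stream docs; the call
        # returns once the persistent task is created, and progress can be
        # polled with indices.get_migrate_reindex_status.
        resp = await client.indices.migrate_reindex(
            reindex={"mode": "upgrade", "source": {"index": "my-data-stream"}}
        )
        print(resp)
    finally:
        await client.close()


asyncio.run(main())
```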

@_rewrite_parameters()
async def migrate_to_data_stream(
self,