Skip to content

Commit 43b5f9e

Browse files
feat(specs): add global push endpoint (generated)
algolia/api-clients-automation#4855 Co-authored-by: algolia-bot <accounts+algolia-api-client-bot@algolia.com> Co-authored-by: Clément Vannicatte <vannicattec@gmail.com>
1 parent 1825ad2 commit 43b5f9e

File tree

2 files changed

+222
-50
lines changed

2 files changed

+222
-50
lines changed

algoliasearch/ingestion/client.py

Lines changed: 222 additions & 44 deletions
Original file line numberDiff line numberDiff line change
@@ -3566,20 +3566,117 @@ async def list_transformations(
35663566
)
35673567
return resp.deserialize(ListTransformationsResponse, resp.raw_data)
35683568

3569+
async def push_with_http_info(
    self,
    index_name: Annotated[
        StrictStr,
        Field(description="Name of the index on which to perform the operation."),
    ],
    push_task_payload: Union[PushTaskPayload, dict[str, Any]],
    watch: Annotated[
        Optional[StrictBool],
        Field(
            description="When provided, the push operation will be synchronous and the API will wait for the ingestion to be finished before responding."
        ),
    ] = None,
    request_options: Optional[Union[dict, RequestOptions]] = None,
) -> ApiResponse[str]:
    """
    Pushes records through the Pipeline, directly to an index. You can make the call synchronous by providing the `watch` parameter, for asynchronous calls, you can use the observability endpoints and/or debugger dashboard to see the status of your task. If you want to leverage the [pre-indexing data transformation](https://www.algolia.com/doc/guides/sending-and-managing-data/send-and-update-your-data/how-to/transform-your-data/), this is the recommended way of ingesting your records. This method is similar to `pushTask`, but requires an `indexName` instead of a `taskID`. If zero or many tasks are found, an error will be returned.

    Required API Key ACLs:
      - addObject
      - deleteIndex
      - editSettings

    :param index_name: Name of the index on which to perform the operation. (required)
    :type index_name: str
    :param push_task_payload: (required)
    :type push_task_payload: PushTaskPayload
    :param watch: When provided, the push operation will be synchronous and the API will wait for the ingestion to be finished before responding.
    :type watch: bool
    :param request_options: The request options to send along with the query, they will be merged with the transporter base parameters (headers, query params, timeouts, etc.). (optional)
    :return: Returns the raw algoliasearch 'APIResponse' object.
    """
    # Guard clauses: both path and body parameters are mandatory.
    if index_name is None:
        raise ValueError("Parameter `index_name` is required when calling `push`.")
    if push_task_payload is None:
        raise ValueError(
            "Parameter `push_task_payload` is required when calling `push`."
        )

    # Only send `watch` when the caller set it, so the server default applies otherwise.
    query_params: Dict[str, Any] = {}
    if watch is not None:
        query_params["watch"] = watch

    body = {} if push_task_payload is None else push_task_payload

    endpoint = "/1/push/{indexName}".replace(
        "{indexName}", quote(str(index_name), safe="")
    )

    # Push calls can be long-running (especially with `watch`), hence the 180s timeouts.
    merged_options = self._request_options.merge(
        query_parameters=query_params,
        data=dumps(body_serializer(body)),
        timeouts={
            "read": 180000,
            "write": 180000,
            "connect": 180000,
        },
        user_request_options=request_options,
    )

    return await self._transporter.request(
        verb=Verb.POST,
        path=endpoint,
        request_options=merged_options,
        use_read_transporter=False,
    )
3636+
3637+
async def push(
    self,
    index_name: Annotated[
        StrictStr,
        Field(description="Name of the index on which to perform the operation."),
    ],
    push_task_payload: Union[PushTaskPayload, dict[str, Any]],
    watch: Annotated[
        Optional[StrictBool],
        Field(
            description="When provided, the push operation will be synchronous and the API will wait for the ingestion to be finished before responding."
        ),
    ] = None,
    request_options: Optional[Union[dict, RequestOptions]] = None,
) -> WatchResponse:
    """
    Pushes records through the Pipeline, directly to an index. You can make the call synchronous by providing the `watch` parameter, for asynchronous calls, you can use the observability endpoints and/or debugger dashboard to see the status of your task. If you want to leverage the [pre-indexing data transformation](https://www.algolia.com/doc/guides/sending-and-managing-data/send-and-update-your-data/how-to/transform-your-data/), this is the recommended way of ingesting your records. This method is similar to `pushTask`, but requires an `indexName` instead of a `taskID`. If zero or many tasks are found, an error will be returned.

    Required API Key ACLs:
      - addObject
      - deleteIndex
      - editSettings

    :param index_name: Name of the index on which to perform the operation. (required)
    :type index_name: str
    :param push_task_payload: (required)
    :type push_task_payload: PushTaskPayload
    :param watch: When provided, the push operation will be synchronous and the API will wait for the ingestion to be finished before responding.
    :type watch: bool
    :param request_options: The request options to send along with the query, they will be merged with the transporter base parameters (headers, query params, timeouts, etc.). (optional)
    :return: Returns the deserialized response in a 'WatchResponse' result object.
    """
    # Delegate to the *_with_http_info variant, then deserialize the raw payload.
    raw_response = await self.push_with_http_info(
        index_name, push_task_payload, watch, request_options
    )
    return raw_response.deserialize(WatchResponse, raw_response.raw_data)
3673+
35693674
async def push_task_with_http_info(
35703675
self,
35713676
task_id: Annotated[
35723677
StrictStr, Field(description="Unique identifier of a task.")
35733678
],
3574-
push_task_payload: Union[
3575-
Annotated[
3576-
PushTaskPayload,
3577-
Field(
3578-
description="Request body of a Search API `batch` request that will be pushed in the Connectors pipeline."
3579-
),
3580-
],
3581-
dict[str, Any],
3582-
],
3679+
push_task_payload: Union[PushTaskPayload, dict[str, Any]],
35833680
watch: Annotated[
35843681
Optional[StrictBool],
35853682
Field(
@@ -3589,7 +3686,7 @@ async def push_task_with_http_info(
35893686
request_options: Optional[Union[dict, RequestOptions]] = None,
35903687
) -> ApiResponse[str]:
35913688
"""
3592-
Push a `batch` request payload through the Pipeline. You can check the status of task pushes with the observability endpoints.
3689+
Pushes records through the Pipeline, directly to an index. You can make the call synchronous by providing the `watch` parameter, for asynchronous calls, you can use the observability endpoints and/or debugger dashboard to see the status of your task. If you want to leverage the [pre-indexing data transformation](https://www.algolia.com/doc/guides/sending-and-managing-data/send-and-update-your-data/how-to/transform-your-data/), this is the recommended way of ingesting your records. This method is similar to `push`, but requires a `taskID` instead of a `indexName`, which is useful when many `destinations` target the same `indexName`.
35933690
35943691
Required API Key ACLs:
35953692
- addObject
@@ -3598,7 +3695,7 @@ async def push_task_with_http_info(
35983695
35993696
:param task_id: Unique identifier of a task. (required)
36003697
:type task_id: str
3601-
:param push_task_payload: Request body of a Search API `batch` request that will be pushed in the Connectors pipeline. (required)
3698+
:param push_task_payload: (required)
36023699
:type push_task_payload: PushTaskPayload
36033700
:param watch: When provided, the push operation will be synchronous and the API will wait for the ingestion to be finished before responding.
36043701
:type watch: bool
@@ -3648,15 +3745,7 @@ async def push_task(
36483745
task_id: Annotated[
36493746
StrictStr, Field(description="Unique identifier of a task.")
36503747
],
3651-
push_task_payload: Union[
3652-
Annotated[
3653-
PushTaskPayload,
3654-
Field(
3655-
description="Request body of a Search API `batch` request that will be pushed in the Connectors pipeline."
3656-
),
3657-
],
3658-
dict[str, Any],
3659-
],
3748+
push_task_payload: Union[PushTaskPayload, dict[str, Any]],
36603749
watch: Annotated[
36613750
Optional[StrictBool],
36623751
Field(
@@ -3666,7 +3755,7 @@ async def push_task(
36663755
request_options: Optional[Union[dict, RequestOptions]] = None,
36673756
) -> WatchResponse:
36683757
"""
3669-
Push a `batch` request payload through the Pipeline. You can check the status of task pushes with the observability endpoints.
3758+
Pushes records through the Pipeline, directly to an index. You can make the call synchronous by providing the `watch` parameter, for asynchronous calls, you can use the observability endpoints and/or debugger dashboard to see the status of your task. If you want to leverage the [pre-indexing data transformation](https://www.algolia.com/doc/guides/sending-and-managing-data/send-and-update-your-data/how-to/transform-your-data/), this is the recommended way of ingesting your records. This method is similar to `push`, but requires a `taskID` instead of a `indexName`, which is useful when many `destinations` target the same `indexName`.
36703759
36713760
Required API Key ACLs:
36723761
- addObject
@@ -3675,7 +3764,7 @@ async def push_task(
36753764
36763765
:param task_id: Unique identifier of a task. (required)
36773766
:type task_id: str
3678-
:param push_task_payload: Request body of a Search API `batch` request that will be pushed in the Connectors pipeline. (required)
3767+
:param push_task_payload: (required)
36793768
:type push_task_payload: PushTaskPayload
36803769
:param watch: When provided, the push operation will be synchronous and the API will wait for the ingestion to be finished before responding.
36813770
:type watch: bool
@@ -8474,20 +8563,117 @@ def list_transformations(
84748563
)
84758564
return resp.deserialize(ListTransformationsResponse, resp.raw_data)
84768565

8566+
def push_with_http_info(
    self,
    index_name: Annotated[
        StrictStr,
        Field(description="Name of the index on which to perform the operation."),
    ],
    push_task_payload: Union[PushTaskPayload, dict[str, Any]],
    watch: Annotated[
        Optional[StrictBool],
        Field(
            description="When provided, the push operation will be synchronous and the API will wait for the ingestion to be finished before responding."
        ),
    ] = None,
    request_options: Optional[Union[dict, RequestOptions]] = None,
) -> ApiResponse[str]:
    """
    Pushes records through the Pipeline, directly to an index. You can make the call synchronous by providing the `watch` parameter, for asynchronous calls, you can use the observability endpoints and/or debugger dashboard to see the status of your task. If you want to leverage the [pre-indexing data transformation](https://www.algolia.com/doc/guides/sending-and-managing-data/send-and-update-your-data/how-to/transform-your-data/), this is the recommended way of ingesting your records. This method is similar to `pushTask`, but requires an `indexName` instead of a `taskID`. If zero or many tasks are found, an error will be returned.

    Required API Key ACLs:
      - addObject
      - deleteIndex
      - editSettings

    :param index_name: Name of the index on which to perform the operation. (required)
    :type index_name: str
    :param push_task_payload: (required)
    :type push_task_payload: PushTaskPayload
    :param watch: When provided, the push operation will be synchronous and the API will wait for the ingestion to be finished before responding.
    :type watch: bool
    :param request_options: The request options to send along with the query, they will be merged with the transporter base parameters (headers, query params, timeouts, etc.). (optional)
    :return: Returns the raw algoliasearch 'APIResponse' object.
    """
    # Guard clauses: both path and body parameters are mandatory.
    if index_name is None:
        raise ValueError("Parameter `index_name` is required when calling `push`.")
    if push_task_payload is None:
        raise ValueError(
            "Parameter `push_task_payload` is required when calling `push`."
        )

    # Only send `watch` when the caller set it, so the server default applies otherwise.
    query_params: Dict[str, Any] = {}
    if watch is not None:
        query_params["watch"] = watch

    body = {} if push_task_payload is None else push_task_payload

    endpoint = "/1/push/{indexName}".replace(
        "{indexName}", quote(str(index_name), safe="")
    )

    # Push calls can be long-running (especially with `watch`), hence the 180s timeouts.
    merged_options = self._request_options.merge(
        query_parameters=query_params,
        data=dumps(body_serializer(body)),
        timeouts={
            "read": 180000,
            "write": 180000,
            "connect": 180000,
        },
        user_request_options=request_options,
    )

    return self._transporter.request(
        verb=Verb.POST,
        path=endpoint,
        request_options=merged_options,
        use_read_transporter=False,
    )
8633+
8634+
def push(
    self,
    index_name: Annotated[
        StrictStr,
        Field(description="Name of the index on which to perform the operation."),
    ],
    push_task_payload: Union[PushTaskPayload, dict[str, Any]],
    watch: Annotated[
        Optional[StrictBool],
        Field(
            description="When provided, the push operation will be synchronous and the API will wait for the ingestion to be finished before responding."
        ),
    ] = None,
    request_options: Optional[Union[dict, RequestOptions]] = None,
) -> WatchResponse:
    """
    Pushes records through the Pipeline, directly to an index. You can make the call synchronous by providing the `watch` parameter, for asynchronous calls, you can use the observability endpoints and/or debugger dashboard to see the status of your task. If you want to leverage the [pre-indexing data transformation](https://www.algolia.com/doc/guides/sending-and-managing-data/send-and-update-your-data/how-to/transform-your-data/), this is the recommended way of ingesting your records. This method is similar to `pushTask`, but requires an `indexName` instead of a `taskID`. If zero or many tasks are found, an error will be returned.

    Required API Key ACLs:
      - addObject
      - deleteIndex
      - editSettings

    :param index_name: Name of the index on which to perform the operation. (required)
    :type index_name: str
    :param push_task_payload: (required)
    :type push_task_payload: PushTaskPayload
    :param watch: When provided, the push operation will be synchronous and the API will wait for the ingestion to be finished before responding.
    :type watch: bool
    :param request_options: The request options to send along with the query, they will be merged with the transporter base parameters (headers, query params, timeouts, etc.). (optional)
    :return: Returns the deserialized response in a 'WatchResponse' result object.
    """
    # Delegate to the *_with_http_info variant, then deserialize the raw payload.
    raw_response = self.push_with_http_info(
        index_name, push_task_payload, watch, request_options
    )
    return raw_response.deserialize(WatchResponse, raw_response.raw_data)
8670+
84778671
def push_task_with_http_info(
84788672
self,
84798673
task_id: Annotated[
84808674
StrictStr, Field(description="Unique identifier of a task.")
84818675
],
8482-
push_task_payload: Union[
8483-
Annotated[
8484-
PushTaskPayload,
8485-
Field(
8486-
description="Request body of a Search API `batch` request that will be pushed in the Connectors pipeline."
8487-
),
8488-
],
8489-
dict[str, Any],
8490-
],
8676+
push_task_payload: Union[PushTaskPayload, dict[str, Any]],
84918677
watch: Annotated[
84928678
Optional[StrictBool],
84938679
Field(
@@ -8497,7 +8683,7 @@ def push_task_with_http_info(
84978683
request_options: Optional[Union[dict, RequestOptions]] = None,
84988684
) -> ApiResponse[str]:
84998685
"""
8500-
Push a `batch` request payload through the Pipeline. You can check the status of task pushes with the observability endpoints.
8686+
Pushes records through the Pipeline, directly to an index. You can make the call synchronous by providing the `watch` parameter, for asynchronous calls, you can use the observability endpoints and/or debugger dashboard to see the status of your task. If you want to leverage the [pre-indexing data transformation](https://www.algolia.com/doc/guides/sending-and-managing-data/send-and-update-your-data/how-to/transform-your-data/), this is the recommended way of ingesting your records. This method is similar to `push`, but requires a `taskID` instead of a `indexName`, which is useful when many `destinations` target the same `indexName`.
85018687
85028688
Required API Key ACLs:
85038689
- addObject
@@ -8506,7 +8692,7 @@ def push_task_with_http_info(
85068692
85078693
:param task_id: Unique identifier of a task. (required)
85088694
:type task_id: str
8509-
:param push_task_payload: Request body of a Search API `batch` request that will be pushed in the Connectors pipeline. (required)
8695+
:param push_task_payload: (required)
85108696
:type push_task_payload: PushTaskPayload
85118697
:param watch: When provided, the push operation will be synchronous and the API will wait for the ingestion to be finished before responding.
85128698
:type watch: bool
@@ -8556,15 +8742,7 @@ def push_task(
85568742
task_id: Annotated[
85578743
StrictStr, Field(description="Unique identifier of a task.")
85588744
],
8559-
push_task_payload: Union[
8560-
Annotated[
8561-
PushTaskPayload,
8562-
Field(
8563-
description="Request body of a Search API `batch` request that will be pushed in the Connectors pipeline."
8564-
),
8565-
],
8566-
dict[str, Any],
8567-
],
8745+
push_task_payload: Union[PushTaskPayload, dict[str, Any]],
85688746
watch: Annotated[
85698747
Optional[StrictBool],
85708748
Field(
@@ -8574,7 +8752,7 @@ def push_task(
85748752
request_options: Optional[Union[dict, RequestOptions]] = None,
85758753
) -> WatchResponse:
85768754
"""
8577-
Push a `batch` request payload through the Pipeline. You can check the status of task pushes with the observability endpoints.
8755+
Pushes records through the Pipeline, directly to an index. You can make the call synchronous by providing the `watch` parameter, for asynchronous calls, you can use the observability endpoints and/or debugger dashboard to see the status of your task. If you want to leverage the [pre-indexing data transformation](https://www.algolia.com/doc/guides/sending-and-managing-data/send-and-update-your-data/how-to/transform-your-data/), this is the recommended way of ingesting your records. This method is similar to `push`, but requires a `taskID` instead of a `indexName`, which is useful when many `destinations` target the same `indexName`.
85788756
85798757
Required API Key ACLs:
85808758
- addObject
@@ -8583,7 +8761,7 @@ def push_task(
85838761
85848762
:param task_id: Unique identifier of a task. (required)
85858763
:type task_id: str
8586-
:param push_task_payload: Request body of a Search API `batch` request that will be pushed in the Connectors pipeline. (required)
8764+
:param push_task_payload: (required)
85878765
:type push_task_payload: PushTaskPayload
85888766
:param watch: When provided, the push operation will be synchronous and the API will wait for the ingestion to be finished before responding.
85898767
:type watch: bool

0 commit comments

Comments
 (0)