From 2120c0a46d3417206b7799e274d5447406c23e4f Mon Sep 17 00:00:00 2001 From: "Clelia (Astra) Bertelli" Date: Tue, 23 Sep 2025 22:00:26 +0200 Subject: [PATCH 01/13] wip: first attempt to workflow client --- examples/client/client.py | 84 ++++++++ examples/client/server.py | 60 ++++++ pyproject.toml | 1 + src/workflows/client/__init__.py | 3 + src/workflows/client/client.py | 342 +++++++++++++++++++++++++++++++ 5 files changed, 490 insertions(+) create mode 100644 examples/client/client.py create mode 100644 examples/client/server.py create mode 100644 src/workflows/client/__init__.py create mode 100644 src/workflows/client/client.py diff --git a/examples/client/client.py b/examples/client/client.py new file mode 100644 index 0000000..64b93eb --- /dev/null +++ b/examples/client/client.py @@ -0,0 +1,84 @@ +import asyncio + +from workflows.client.client import WorkflowClient + +from workflows.events import StartEvent, HumanResponseEvent +from pydantic import PrivateAttr, model_validator, Field + +from typing import Literal, Callable, Self + + +class InputNumbers(StartEvent): + a: int + b: int + operation: Literal["sum", "subtraction"] = Field(default="sum") + _function: Callable[[int, int], int] = PrivateAttr(default=lambda a, b: a + b) + + @model_validator(mode="after") + def assign_function(self) -> Self: + if self.operation == "subtraction": + self._function = lambda a, b: a - b + return self + + +class HumanApprovedResult(HumanResponseEvent): + approved: bool + + +async def main() -> None: + client = WorkflowClient(protocol="http", host="localhost", port=8000) + workflows = await client.list_workflows() + print("===== AVAILABLE WORKFLOWS ====") + print(workflows) + is_healthy = await client.is_healthy() + print("==== HEALTH CHECK ====") + print("Healthy" if is_healthy else "Not Healty :(") + ping_time = await client.ping() + print("==== PING TIME ====") + print(ping_time, "ms") + handler_id = await client.run_workflow_nowait( + "add_or_subtract", + start_event=InputNumbers(a=1, b=3, operation="sum"), + context=None, + ) + print("==== STARTING THE WORKFLOW ===") + print(f"Workflow running with handler ID: {handler_id}") + print("=== STREAMING EVENTS ===") + + def handle_event(event_data: dict) -> None: + print(f"Received event: {event_data}") + + # Stream events in background + stream_task = asyncio.create_task( + client.stream_events( + handler_id=handler_id, + event_callback=handle_event, + sse=True, # Use Server-Sent Events + ) + ) + + # Poll for result + result = None + while result is None: + try: + result = await client.get_result(handler_id) + if result is not None: + break + await asyncio.sleep(1) + except Exception as e: + print(f"Error: {e}") + await asyncio.sleep(1) + + # Cancel streaming task + stream_task.cancel() + try: + await stream_task + except asyncio.CancelledError: + pass + + print(f"Final result: {result}") + return result + + +if __name__ == "__main__": + asyncio.run(main()) diff --git a/examples/client/server.py b/examples/client/server.py new file mode 100644 index 0000000..dbb578b --- /dev/null +++ b/examples/client/server.py @@ -0,0 +1,60 @@ +from workflows import Workflow, step, Context +from workflows.events import StartEvent, StopEvent, InputRequiredEvent +from pydantic import Field +from workflows.server import WorkflowServer + +from typing import Literal + + +class InputNumbers(StartEvent): + a: int + b: int + operation: Literal["sum", "subtraction"] = Field(default="sum") + + +class CalculationEvent(InputRequiredEvent): + result: int + + +class 
OutputEvent(StopEvent): + message: str + + +class AddOrSubtractWorkflow(Workflow): + @step + async def first_step( + self, ev: InputNumbers, ctx: Context + ) -> CalculationEvent | None: + ctx.write_event_to_stream(ev) + result = ev.a + ev.b if ev.operation == "sum" else ev.a - ev.b + async with ctx.store.edit_state() as state: + state.operation = ev.operation + state.a = ev.a + state.b = ev.b + state.result = result + ctx.write_event_to_stream(CalculationEvent(result=result)) + return CalculationEvent(result=result) + + @step + async def second_step(self, ev: CalculationEvent, ctx: Context) -> OutputEvent: + state = await ctx.store.get_state() + return OutputEvent( + message=f"You approved the result from your operation ({state.operation}) between {state.a} and {state.b}: {ev.result}" + ) + + +async def main() -> None: + server = WorkflowServer() + server.add_workflow("add_or_subtract", AddOrSubtractWorkflow(timeout=1000)) + try: + await server.serve("localhost", 8000) + except KeyboardInterrupt: + return + except Exception as e: + raise ValueError(f"An error occurred: {e}") + + +if __name__ == "__main__": + import asyncio + + asyncio.run(main()) diff --git a/pyproject.toml b/pyproject.toml index 8af9780..0d43951 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -31,6 +31,7 @@ dependencies = [ [project.optional-dependencies] server = ["starlette>=0.39.0", "uvicorn>=0.32.0"] +client = ["httpx>=0.28.1"] [tool.basedpyright] typeCheckingMode = "standard" diff --git a/src/workflows/client/__init__.py b/src/workflows/client/__init__.py new file mode 100644 index 0000000..69bc789 --- /dev/null +++ b/src/workflows/client/__init__.py @@ -0,0 +1,3 @@ +from .client import WorkflowClient + +__all__ = ["WorkflowClient"] diff --git a/src/workflows/client/client.py b/src/workflows/client/client.py new file mode 100644 index 0000000..0861754 --- /dev/null +++ b/src/workflows/client/client.py @@ -0,0 +1,342 @@ +import httpx +import time +import json +import inspect + +from typing import Literal, Any, Union, Callable, AsyncGenerator, AsyncIterator +from contextlib import asynccontextmanager +from logging import getLogger +from workflows.events import StartEvent, Event +from workflows import Context + + +logger = getLogger(__name__) + + +class WorkflowClient: + def __init__( + self, + protocol: Literal["http", "https"] | None = None, + host: str | None = None, + port: int | None = None, + timeout: int | None = None, + ): + # TODO: middleware-related logic + self.protocol = protocol or "http" + self.host = host or "localhost" + self.port = port or 8000 + self.timeout = timeout or 600 + # TODO: add some basic TLS/verification and auth features + + @asynccontextmanager + async def _get_client(self) -> AsyncIterator: + async with httpx.AsyncClient( + base_url=self.protocol + "://" + self.host + ":" + str(self.port), + timeout=self.timeout, + ) as client: + yield client + + async def is_healthy(self) -> bool: + """ + Check whether the workflow server is helathy or not + + Returns: + bool: True if the workflow server is healthy, false if not + """ + async with self._get_client() as client: + response = await client.get("/health") + if response.status_code == 200: + return response.json().get("status", "") == "healthy" + return False + + async def ping(self) -> float: + """ + Ping the workflow and get the latency in milliseconds + + Returns: + float: latency in milliseconds + """ + async with self._get_client() as client: + start = time.time() + response = await client.get("/health") + if response.status_code == 
200: + end = time.time() + return (end - start) * 1000 + else: + raise httpx.ConnectError( + f"Failed to establish a connection with server running on: {self.protocol}://{self.host}:{self.port}" + ) + + async def list_workflows(self) -> list[str]: + """ + List workflows + + Returns: + list: List of workflow names available through the server. + """ + async with self._get_client() as client: + response = await client.get("/workflows") + + response.raise_for_status() + + return response.json()["workflows"] + + async def run_workflow( + self, + workflow_name: str, + start_event: Union[StartEvent, dict[str, Any], None] = None, + context: Union[Context, dict[str, Any], None] = None, + **kwargs: Any, + ) -> Any: + """ + Run the workflow and wait until completion. + + Args: + start_event (Union[StartEvent, dict[str, Any], None]): start event class or dictionary representation (optional, defaults to None and get passed as an empty dictionary if not provided). + context: Context or serialized representation of it (optional, defaults to None if not provided) + **kwargs: Any number of keyword arguments that would be passed on as additional keyword arguments to the workflow. + + Returns: + Any: Result of the workflow + """ + if isinstance(start_event, StartEvent): + try: + start_event = start_event.model_dump() + except Exception as e: + raise ValueError( + f"Impossible to serialize the start event because of: {e}" + ) + if isinstance(context, Context): + try: + context = context.to_dict() + except Exception as e: + raise ValueError(f"Impossible to serialize the context because of: {e}") + request_body = { + "start_event": start_event or {}, + "context": context or {}, + "additional_kwargs": kwargs, + } + async with self._get_client() as client: + response = await client.post( + f"/workflows/{workflow_name}/run", json=request_body + ) + + response.raise_for_status() + + return response.json()["result"] + + async def run_workflow_nowait( + self, + workflow_name: str, + start_event: Union[StartEvent, dict[str, Any], None] = None, + context: Union[Context, dict[str, Any], None] = None, + **kwargs: Any, + ) -> str: + """ + Run the workflow in the background. + + Args: + start_event (Union[StartEvent, dict[str, Any], None]): start event class or dictionary representation (optional, defaults to None and get passed as an empty dictionary if not provided). + context: Context or serialized representation of it (optional, defaults to None if not provided) + **kwargs: Any number of keyword arguments that would be passed on as additional keyword arguments to the workflow. 
+ + Returns: + str: ID of the handler running the workflow + """ + if isinstance(start_event, StartEvent): + try: + start_event = start_event.model_dump() + except Exception as e: + raise ValueError( + f"Impossible to serialize the start event because of: {e}" + ) + if isinstance(context, Context): + try: + context = context.to_dict() + except Exception as e: + raise ValueError(f"Impossible to serialize the context because of: {e}") + request_body = { + "start_event": start_event or {}, + "context": context or {}, + "additional_kwargs": kwargs, + } + async with self._get_client() as client: + response = await client.post( + f"/workflows/{workflow_name}/run-nowait", json=request_body + ) + + response.raise_for_status() + + return response.json()["handler_id"] + + async def _stream_events_sse( + self, + handler_id: str, + ) -> AsyncGenerator[dict[str, Any], None]: + """ + Stream events using Server-Sent Events format + """ + url = f"/events/{handler_id}?sse=true" + + async with self._get_client() as client: + try: + async with client.stream( + "GET", + url, + ) as response: + # Handle different response codes + if response.status_code == 404: + raise ValueError("Handler not found") + elif response.status_code == 204: + # Handler completed, no more events + return # type: ignore + + response.raise_for_status() + + async for line in response.aiter_lines(): + if line.startswith("data: "): + # Extract JSON from SSE data line + json_data = line[6:] # Remove 'data: ' prefix + if json_data.strip(): # Skip empty data lines + try: + event = json.loads(json_data.replace("\n", "")) + yield event.get("value", {}) + except json.JSONDecodeError as e: + print( + f"Failed to parse JSON: {e}, data: {json_data}" + ) + continue + + except httpx.TimeoutException: + raise TimeoutError( + f"Timeout waiting for events from handler {handler_id}" + ) + except httpx.RequestError as e: + raise ConnectionError(f"Failed to connect to event stream: {e}") + + async def _stream_events_ndjson( + self, + handler_id: str, + ) -> AsyncGenerator[dict[str, Any], None]: + """ + Stream events using newline-delimited JSON format + """ + url = f"/events/{handler_id}?sse=false" + + async with self._get_client() as client: + try: + async with client.stream("GET", url) as response: + # Handle different response codes + if response.status_code == 404: + raise ValueError("Handler not found") + elif response.status_code == 204: + # Handler completed, no more events + return + + response.raise_for_status() + + async for line in response.aiter_lines(): + if line.strip(): # Skip empty lines + try: + event = json.loads(line.replace("\n", "")) + yield event.get("value", {}) + except json.JSONDecodeError as e: + print(f"Failed to parse JSON: {e}, data: {line}") + continue + + except httpx.TimeoutException: + raise TimeoutError( + f"Timeout waiting for events from handler {handler_id}" + ) + except httpx.RequestError as e: + raise ConnectionError(f"Failed to connect to event stream: {e}") + + async def stream_events( + self, + handler_id: str, + event_callback: Callable[[dict[str, Any]], Any] | None = None, + sse: bool = True, + ) -> None: + """ + Stream events from a running handler. 
+ + Args: + handler_id (str): ID of the handler streaming the events + event_callback (Callable[[dict[str, Any]], Any]): Function to call when an event is received from the stream (optional, defaults to None) + sse (bool): Whether to enable server-sent events or not + + Returns: + None + """ + callback = event_callback or ( + lambda event: logger.info(f"Processing data: {event}") + ) + is_async = inspect.iscoroutinefunction(callback) + if sse: + async for event in self._stream_events_sse(handler_id): + if is_async: + await callback(event) # type: ignore + else: + callback(event) + else: + async for event in self._stream_events_ndjson(handler_id): + if is_async: + await callback(event) # type: ignore + else: + callback(event) + return None + + async def send_event( + self, + handler_id: str, + event: Event | dict[str, Any] | str, + step: str | None = None, + ) -> bool: + """ + Send an event to the workflow. + + Args: + handler_id (str): ID of the handler of the running workflow to send the event to + event (Event | dict[str, Any] | str): Event to send, represented as an Event object, a dictionary or a serialized string. + step (str | None): Step to send the event to (optional, defaults to None) + + Returns: + bool: Success status of the send operation + """ + if isinstance(event, Event): + try: + event = event.model_dump_json() + except Exception as e: + raise ValueError(f"Error while serializing the provided event: {e}") + elif isinstance(event, dict): + try: + event = json.dumps(event) + except Exception as e: + raise ValueError(f"Error while serializing the provided event: {e}") + request_body = {"event": event} + if step: + request_body.update({"step": step}) + async with self._get_client() as client: + response = await client.post(f"/events/{handler_id}", json=request_body) + response.raise_for_status() + + return response.json()["status"] == "sent" + + async def get_result(self, handler_id: str) -> Any: + """ + Get the result of the workflow associated with the specified handler ID. 
+ + Args: + handler_id (str): ID of the handler running the workflow + + Returns: + Any: Result of the workflow + """ + async with self._get_client() as client: + response = await client.get(f"/results/{handler_id}") + response.raise_for_status() + + if response.status_code == 202: + return + + return response.json()["result"] From b4bc1dfb66c2492ec15f04ec3782f30ce8832c1a Mon Sep 17 00:00:00 2001 From: "Clelia (Astra) Bertelli" Date: Wed, 24 Sep 2025 15:42:41 +0200 Subject: [PATCH 02/13] chore: use openapi as a base for client --- .gitignore | 3 + .pre-commit-config.yaml | 2 + examples/client/client.py | 54 +- src/workflows/client/client.py | 512 ++++++++---------- src/workflows/client/utils.py | 20 + .../openapi_generated_client/__init__.py | 0 .../workflows_api_client/__init__.py | 8 + .../workflows_api_client/api/__init__.py | 1 + .../api/default/__init__.py | 1 + .../api/default/get_events_handler_id.py | 216 ++++++++ .../api/default/get_handlers.py | 131 +++++ .../api/default/get_health.py | 135 +++++ .../api/default/get_results_handler_id.py | 172 ++++++ .../api/default/get_workflows.py | 135 +++++ .../get_workflows_name_representation.py | 167 ++++++ .../api/default/get_workflows_name_schema.py | 171 ++++++ .../api/default/post_events_handler_id.py | 193 +++++++ .../api/default/post_workflows_name_run.py | 201 +++++++ .../default/post_workflows_name_run_nowait.py | 193 +++++++ .../workflows_api_client/client.py | 268 +++++++++ .../workflows_api_client/errors.py | 16 + .../workflows_api_client/models/__init__.py | 45 ++ .../get_events_handler_id_response_200.py | 74 +++ ...et_events_handler_id_response_200_value.py | 44 ++ .../models/get_health_response_200.py | 59 ++ ...kflows_name_representation_response_200.py | 59 ++ .../get_workflows_name_schema_response_200.py | 67 +++ .../models/get_workflows_response_200.py | 59 ++ .../workflows_api_client/models/handler.py | 197 +++++++ .../models/handler_status.py | 10 + .../models/handlers_list.py | 73 +++ .../models/post_events_handler_id_body.py | 70 +++ .../post_events_handler_id_response_200.py | 61 +++ ...t_events_handler_id_response_200_status.py | 8 + .../models/post_workflows_name_run_body.py | 109 ++++ .../post_workflows_name_run_body_context.py | 44 ++ .../post_workflows_name_run_body_kwargs.py | 44 ++ ...ost_workflows_name_run_body_start_event.py | 44 ++ .../post_workflows_name_run_nowait_body.py | 109 ++++ ..._workflows_name_run_nowait_body_context.py | 44 ++ ...t_workflows_name_run_nowait_body_kwargs.py | 44 ++ ...kflows_name_run_nowait_body_start_event.py | 44 ++ .../workflows_api_client/py.typed | 1 + .../workflows_api_client/types.py | 54 ++ src/workflows/server/server.py | 4 - 45 files changed, 3626 insertions(+), 340 deletions(-) create mode 100644 src/workflows/client/utils.py create mode 100644 src/workflows/openapi_generated_client/__init__.py create mode 100644 src/workflows/openapi_generated_client/workflows_api_client/__init__.py create mode 100644 src/workflows/openapi_generated_client/workflows_api_client/api/__init__.py create mode 100644 src/workflows/openapi_generated_client/workflows_api_client/api/default/__init__.py create mode 100644 src/workflows/openapi_generated_client/workflows_api_client/api/default/get_events_handler_id.py create mode 100644 src/workflows/openapi_generated_client/workflows_api_client/api/default/get_handlers.py create mode 100644 src/workflows/openapi_generated_client/workflows_api_client/api/default/get_health.py create mode 100644 
src/workflows/openapi_generated_client/workflows_api_client/api/default/get_results_handler_id.py create mode 100644 src/workflows/openapi_generated_client/workflows_api_client/api/default/get_workflows.py create mode 100644 src/workflows/openapi_generated_client/workflows_api_client/api/default/get_workflows_name_representation.py create mode 100644 src/workflows/openapi_generated_client/workflows_api_client/api/default/get_workflows_name_schema.py create mode 100644 src/workflows/openapi_generated_client/workflows_api_client/api/default/post_events_handler_id.py create mode 100644 src/workflows/openapi_generated_client/workflows_api_client/api/default/post_workflows_name_run.py create mode 100644 src/workflows/openapi_generated_client/workflows_api_client/api/default/post_workflows_name_run_nowait.py create mode 100644 src/workflows/openapi_generated_client/workflows_api_client/client.py create mode 100644 src/workflows/openapi_generated_client/workflows_api_client/errors.py create mode 100644 src/workflows/openapi_generated_client/workflows_api_client/models/__init__.py create mode 100644 src/workflows/openapi_generated_client/workflows_api_client/models/get_events_handler_id_response_200.py create mode 100644 src/workflows/openapi_generated_client/workflows_api_client/models/get_events_handler_id_response_200_value.py create mode 100644 src/workflows/openapi_generated_client/workflows_api_client/models/get_health_response_200.py create mode 100644 src/workflows/openapi_generated_client/workflows_api_client/models/get_workflows_name_representation_response_200.py create mode 100644 src/workflows/openapi_generated_client/workflows_api_client/models/get_workflows_name_schema_response_200.py create mode 100644 src/workflows/openapi_generated_client/workflows_api_client/models/get_workflows_response_200.py create mode 100644 src/workflows/openapi_generated_client/workflows_api_client/models/handler.py create mode 100644 src/workflows/openapi_generated_client/workflows_api_client/models/handler_status.py create mode 100644 src/workflows/openapi_generated_client/workflows_api_client/models/handlers_list.py create mode 100644 src/workflows/openapi_generated_client/workflows_api_client/models/post_events_handler_id_body.py create mode 100644 src/workflows/openapi_generated_client/workflows_api_client/models/post_events_handler_id_response_200.py create mode 100644 src/workflows/openapi_generated_client/workflows_api_client/models/post_events_handler_id_response_200_status.py create mode 100644 src/workflows/openapi_generated_client/workflows_api_client/models/post_workflows_name_run_body.py create mode 100644 src/workflows/openapi_generated_client/workflows_api_client/models/post_workflows_name_run_body_context.py create mode 100644 src/workflows/openapi_generated_client/workflows_api_client/models/post_workflows_name_run_body_kwargs.py create mode 100644 src/workflows/openapi_generated_client/workflows_api_client/models/post_workflows_name_run_body_start_event.py create mode 100644 src/workflows/openapi_generated_client/workflows_api_client/models/post_workflows_name_run_nowait_body.py create mode 100644 src/workflows/openapi_generated_client/workflows_api_client/models/post_workflows_name_run_nowait_body_context.py create mode 100644 src/workflows/openapi_generated_client/workflows_api_client/models/post_workflows_name_run_nowait_body_kwargs.py create mode 100644 src/workflows/openapi_generated_client/workflows_api_client/models/post_workflows_name_run_nowait_body_start_event.py create mode 
100644 src/workflows/openapi_generated_client/workflows_api_client/py.typed create mode 100644 src/workflows/openapi_generated_client/workflows_api_client/types.py diff --git a/.gitignore b/.gitignore index 8e07f9f..200334a 100644 --- a/.gitignore +++ b/.gitignore @@ -147,3 +147,6 @@ cython_debug/ # Generated files openapi.json +src/workflows/openapi_generated_client/README.md +src/workflows/openapi_generated_client/pyproject.toml +src/workflows/openapi_generated_client/.gitignore diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 4eceac4..cc0072f 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -62,3 +62,5 @@ repos: rev: v0.23.1 hooks: - id: toml-sort-fix + +exclude: ^(src/workflows/openapi_generated_client/) \ No newline at end of file diff --git a/examples/client/client.py b/examples/client/client.py index 64b93eb..1c91abd 100644 --- a/examples/client/client.py +++ b/examples/client/client.py @@ -2,27 +2,16 @@ from workflows.client.client import WorkflowClient -from workflows.events import StartEvent, HumanResponseEvent -from pydantic import PrivateAttr, model_validator, Field +from workflows.events import StartEvent +from pydantic import Field -from typing import Literal, Callable, Self +from typing import Literal class InputNumbers(StartEvent): a: int b: int operation: Literal["sum", "subtraction"] = Field(default="sum") - _function: Callable[[int, int], int] = PrivateAttr(default=lambda a, b: a + b) - - @model_validator(mode="after") - def assign_function(self) -> Self: - if self.operation == "subtraction": - self._function = lambda a, b: a - b - return self - - -class HumanApprovedResult(HumanResponseEvent): - approved: bool async def main() -> None: @@ -36,48 +25,29 @@ async def main() -> None: ping_time = await client.ping() print("==== PING TIME ====") print(ping_time, "ms") - handler_id = await client.run_workflow_nowait( + handler = await client.run_workflow_nowait( "add_or_subtract", start_event=InputNumbers(a=1, b=3, operation="sum"), context=None, ) print("==== STARTING THE WORKFLOW ===") - print(f"Workflow running with handler ID: {handler_id}") - print("=== STREAMING EVENTS ===") - - def handle_event(event_data: dict) -> None: - print(f"Received event: {event_data}") - - # Stream events in background - stream_task = asyncio.create_task( - client.stream_events( - handler_id=handler_id, - event_callback=handle_event, - sse=True, # Use Server-Sent Events - ) - ) - + print(f"Workflow running with handler: {handler}") + # print("=== STREAMING EVENTS ===") + # events = await client.get_workflow_events(handler) + # print(events) # Poll for result - result = None - while result is None: + result = handler.status.value + while result == "running": try: - result = await client.get_result(handler_id) - if result is not None: + result = await client.get_workflow_result(handler) + if result != "running": break await asyncio.sleep(1) except Exception as e: print(f"Error: {e}") await asyncio.sleep(1) - # Cancel streaming task - stream_task.cancel() - try: - await stream_task - except asyncio.CancelledError: - pass - print(f"Final result: {result}") - return result if __name__ == "__main__": diff --git a/src/workflows/client/client.py b/src/workflows/client/client.py index 0861754..5486283 100644 --- a/src/workflows/client/client.py +++ b/src/workflows/client/client.py @@ -1,85 +1,144 @@ -import httpx +# TOP-LEVEL import time import json -import inspect -from typing import Literal, Any, Union, Callable, AsyncGenerator, AsyncIterator -from contextlib 
import asynccontextmanager -from logging import getLogger -from workflows.events import StartEvent, Event +# GENERATED CLASSES (CLIENTS) +from workflows.openapi_generated_client.workflows_api_client import ( + Client, + AuthenticatedClient, +) + +# GENERATED FUNCTIONS (API) +from workflows.openapi_generated_client.workflows_api_client.api.default.get_health import ( + asyncio as get_health, +) +from workflows.openapi_generated_client.workflows_api_client.api.default.get_workflows import ( + asyncio as get_workflows, +) +from workflows.openapi_generated_client.workflows_api_client.api.default.get_handlers import ( + asyncio as get_handlers, +) +from workflows.openapi_generated_client.workflows_api_client.api.default.get_results_handler_id import ( + asyncio as get_results_handler_id, +) +from workflows.openapi_generated_client.workflows_api_client.api.default.get_events_handler_id import ( + asyncio as get_events_handler_id, +) +from workflows.openapi_generated_client.workflows_api_client.api.default.post_workflows_name_run import ( + asyncio as post_workflows_name_run, +) +from workflows.openapi_generated_client.workflows_api_client.api.default.post_workflows_name_run_nowait import ( + asyncio as post_workflows_name_run_nowait, +) +from workflows.openapi_generated_client.workflows_api_client.api.default.post_events_handler_id import ( + asyncio as post_events_handler_id, +) + +# GENERATED TYPES (API) +from workflows.openapi_generated_client.workflows_api_client.models.post_workflows_name_run_body import ( + PostWorkflowsNameRunBody, +) +from workflows.openapi_generated_client.workflows_api_client.models.post_workflows_name_run_body_context import ( + PostWorkflowsNameRunBodyContext, +) +from workflows.openapi_generated_client.workflows_api_client.models.post_workflows_name_run_body_start_event import ( + PostWorkflowsNameRunBodyStartEvent, +) +from workflows.openapi_generated_client.workflows_api_client.models.post_workflows_name_run_body_kwargs import ( + PostWorkflowsNameRunBodyKwargs, +) +from workflows.openapi_generated_client.workflows_api_client.models.post_workflows_name_run_nowait_body import ( + PostWorkflowsNameRunNowaitBody, +) +from workflows.openapi_generated_client.workflows_api_client.models.post_workflows_name_run_nowait_body_context import ( + PostWorkflowsNameRunNowaitBodyContext, +) +from workflows.openapi_generated_client.workflows_api_client.models.post_workflows_name_run_nowait_body_kwargs import ( + PostWorkflowsNameRunNowaitBodyKwargs, +) +from workflows.openapi_generated_client.workflows_api_client.models.post_workflows_name_run_nowait_body_start_event import ( + PostWorkflowsNameRunNowaitBodyStartEvent, +) +from workflows.openapi_generated_client.workflows_api_client.models.get_events_handler_id_response_200 import ( + GetEventsHandlerIdResponse200, +) +from workflows.openapi_generated_client.workflows_api_client.models.post_events_handler_id_body import ( + PostEventsHandlerIdBody, +) +from workflows.openapi_generated_client.workflows_api_client.models.post_events_handler_id_response_200 import ( + PostEventsHandlerIdResponse200, +) +from workflows.openapi_generated_client.workflows_api_client.models.handler import ( + Handler, +) +from workflows.openapi_generated_client.workflows_api_client.models.handler_status import ( + HandlerStatus, +) +from workflows.openapi_generated_client.workflows_api_client.types import Unset, UNSET + +# MISC from workflows import Context - - -logger = getLogger(__name__) +from workflows.events import StartEvent, Event +from 
workflows.context.serializers import JsonSerializer +from .utils import AuthDetails, EventDict +from typing import Literal, Optional, Any, Union, cast class WorkflowClient: def __init__( self, - protocol: Literal["http", "https"] | None = None, - host: str | None = None, - port: int | None = None, - timeout: int | None = None, - ): - # TODO: middleware-related logic - self.protocol = protocol or "http" - self.host = host or "localhost" - self.port = port or 8000 - self.timeout = timeout or 600 - # TODO: add some basic TLS/verification and auth features - - @asynccontextmanager - async def _get_client(self) -> AsyncIterator: - async with httpx.AsyncClient( - base_url=self.protocol + "://" + self.host + ":" + str(self.port), - timeout=self.timeout, - ) as client: - yield client + protocol: Literal["http", "https"] = "http", + host: str = "localhost", + port: int = 80, + auth_details: Optional[AuthDetails] = None, + raise_on_unexpected_status: bool = True, + **kwargs: Any, + ) -> None: + self.base_url = f"{protocol}://{host}:{port}" + if auth_details: + self._client = AuthenticatedClient( + base_url=self.base_url, + token=auth_details.token, + prefix=auth_details.prefix, + auth_header_name=auth_details.auth_header_name, + raise_on_unexpected_status=raise_on_unexpected_status, + cookies=kwargs.get("cookies", {}), + headers=kwargs.get("headers", {}), + timeout=kwargs.get("timeout", None), + verify_ssl=kwargs.get("verify_ssl", True), + follow_redirects=kwargs.get("follow_redirects", False), + httpx_args=kwargs.get("httpx_args", {}), + ) + else: + self._client = Client( + base_url=self.base_url, + raise_on_unexpected_status=raise_on_unexpected_status, + cookies=kwargs.get("cookies", {}), + headers=kwargs.get("headers", {}), + timeout=kwargs.get("timeout", None), + verify_ssl=kwargs.get("verify_ssl", True), + follow_redirects=kwargs.get("follow_redirects", False), + httpx_args=kwargs.get("httpx_args", {}), + ) async def is_healthy(self) -> bool: - """ - Check whether the workflow server is helathy or not - - Returns: - bool: True if the workflow server is healthy, false if not - """ - async with self._get_client() as client: - response = await client.get("/health") - if response.status_code == 200: - return response.json().get("status", "") == "healthy" - return False + response = await get_health(client=self._client) + if not response: + return False + return True async def ping(self) -> float: - """ - Ping the workflow and get the latency in milliseconds - - Returns: - float: latency in milliseconds - """ - async with self._get_client() as client: - start = time.time() - response = await client.get("/health") - if response.status_code == 200: - end = time.time() - return (end - start) * 1000 - else: - raise httpx.ConnectError( - f"Failed to establish a connection with server running on: {self.protocol}://{self.host}:{self.port}" - ) + start = time.time() + response = await get_health(client=self._client) + if not response: + return -1 + return (time.time() - start) * 1000 async def list_workflows(self) -> list[str]: - """ - List workflows - - Returns: - list: List of workflow names available through the server. 
- """ - async with self._get_client() as client: - response = await client.get("/workflows") - - response.raise_for_status() - - return response.json()["workflows"] + response = await get_workflows(client=self._client) + if not response: + return [] + return response.workflows async def run_workflow( self, @@ -87,43 +146,29 @@ async def run_workflow( start_event: Union[StartEvent, dict[str, Any], None] = None, context: Union[Context, dict[str, Any], None] = None, **kwargs: Any, - ) -> Any: - """ - Run the workflow and wait until completion. - - Args: - start_event (Union[StartEvent, dict[str, Any], None]): start event class or dictionary representation (optional, defaults to None and get passed as an empty dictionary if not provided). - context: Context or serialized representation of it (optional, defaults to None if not provided) - **kwargs: Any number of keyword arguments that would be passed on as additional keyword arguments to the workflow. - - Returns: - Any: Result of the workflow - """ - if isinstance(start_event, StartEvent): - try: - start_event = start_event.model_dump() - except Exception as e: - raise ValueError( - f"Impossible to serialize the start event because of: {e}" - ) - if isinstance(context, Context): + ) -> Handler: + if start_event and isinstance(start_event, StartEvent): + start_event = start_event.model_dump() + if context and isinstance(context, Context): try: context = context.to_dict() except Exception as e: - raise ValueError(f"Impossible to serialize the context because of: {e}") - request_body = { - "start_event": start_event or {}, - "context": context or {}, - "additional_kwargs": kwargs, - } - async with self._get_client() as client: - response = await client.post( - f"/workflows/{workflow_name}/run", json=request_body - ) - - response.raise_for_status() - - return response.json()["result"] + raise ValueError(f"Impossible to serialize context because of: {e}") + response = await post_workflows_name_run( + name=workflow_name, + client=self._client, + body=PostWorkflowsNameRunBody( + start_event=PostWorkflowsNameRunBodyStartEvent.from_dict( + cast(dict, start_event) or {} + ), + context=PostWorkflowsNameRunBodyContext.from_dict(context or {}), + kwargs=PostWorkflowsNameRunBodyKwargs.from_dict(kwargs), + ), + ) + if isinstance(response, Handler): + return response + else: + raise ValueError("Response was not properly generated") async def run_workflow_nowait( self, @@ -131,212 +176,91 @@ async def run_workflow_nowait( start_event: Union[StartEvent, dict[str, Any], None] = None, context: Union[Context, dict[str, Any], None] = None, **kwargs: Any, - ) -> str: - """ - Run the workflow in the background. - - Args: - start_event (Union[StartEvent, dict[str, Any], None]): start event class or dictionary representation (optional, defaults to None and get passed as an empty dictionary if not provided). - context: Context or serialized representation of it (optional, defaults to None if not provided) - **kwargs: Any number of keyword arguments that would be passed on as additional keyword arguments to the workflow. 
- - Returns: - str: ID of the handler running the workflow - """ - if isinstance(start_event, StartEvent): - try: - start_event = start_event.model_dump() - except Exception as e: - raise ValueError( - f"Impossible to serialize the start event because of: {e}" - ) - if isinstance(context, Context): + ) -> Handler: + if start_event and isinstance(start_event, StartEvent): + start_event = start_event.model_dump() + if context and isinstance(context, Context): try: context = context.to_dict() except Exception as e: - raise ValueError(f"Impossible to serialize the context because of: {e}") - request_body = { - "start_event": start_event or {}, - "context": context or {}, - "additional_kwargs": kwargs, - } - async with self._get_client() as client: - response = await client.post( - f"/workflows/{workflow_name}/run-nowait", json=request_body - ) - - response.raise_for_status() - - return response.json()["handler_id"] - - async def _stream_events_sse( - self, - handler_id: str, - ) -> AsyncGenerator[dict[str, Any], None]: - """ - Stream events using Server-Sent Events format - """ - url = f"/events/{handler_id}?sse=true" - - async with self._get_client() as client: - try: - async with client.stream( - "GET", - url, - ) as response: - # Handle different response codes - if response.status_code == 404: - raise ValueError("Handler not found") - elif response.status_code == 204: - # Handler completed, no more events - return # type: ignore - - response.raise_for_status() - - async for line in response.aiter_lines(): - if line.startswith("data: "): - # Extract JSON from SSE data line - json_data = line[6:] # Remove 'data: ' prefix - if json_data.strip(): # Skip empty data lines - try: - event = json.loads(json_data.replace("\n", "")) - yield event.get("value", {}) - except json.JSONDecodeError as e: - print( - f"Failed to parse JSON: {e}, data: {json_data}" - ) - continue - - except httpx.TimeoutException: - raise TimeoutError( - f"Timeout waiting for events from handler {handler_id}" - ) - except httpx.RequestError as e: - raise ConnectionError(f"Failed to connect to event stream: {e}") - - async def _stream_events_ndjson( - self, - handler_id: str, - ) -> AsyncGenerator[dict[str, Any], None]: - """ - Stream events using newline-delimited JSON format - """ - url = f"/events/{handler_id}?sse=false" - - async with self._get_client() as client: - try: - async with client.stream("GET", url) as response: - # Handle different response codes - if response.status_code == 404: - raise ValueError("Handler not found") - elif response.status_code == 204: - # Handler completed, no more events - return - - response.raise_for_status() - - async for line in response.aiter_lines(): - if line.strip(): # Skip empty lines - try: - event = json.loads(line.replace("\n", "")) - yield event.get("value", {}) - except json.JSONDecodeError as e: - print(f"Failed to parse JSON: {e}, data: {line}") - continue - - except httpx.TimeoutException: - raise TimeoutError( - f"Timeout waiting for events from handler {handler_id}" - ) - except httpx.RequestError as e: - raise ConnectionError(f"Failed to connect to event stream: {e}") + raise ValueError(f"Impossible to serialize context because of: {e}") + response = await post_workflows_name_run_nowait( + name=workflow_name, + client=self._client, + body=PostWorkflowsNameRunNowaitBody( + start_event=PostWorkflowsNameRunNowaitBodyStartEvent.from_dict( + cast(dict, start_event) or {} + ), + context=PostWorkflowsNameRunNowaitBodyContext.from_dict(context or {}), + 
kwargs=PostWorkflowsNameRunNowaitBodyKwargs.from_dict(kwargs), + ), + ) + if isinstance(response, Handler): + return response + else: + raise ValueError("Response was not properly generated") - async def stream_events( - self, - handler_id: str, - event_callback: Callable[[dict[str, Any]], Any] | None = None, - sse: bool = True, - ) -> None: - """ - Stream events from a running handler. + async def get_workflow_events(self, handler: Handler) -> dict: + response = await get_events_handler_id( + handler_id=handler.handler_id, client=self._client, sse=False + ) + if isinstance(response, GetEventsHandlerIdResponse200): + return response.to_dict() + else: + raise ValueError("Response was not properly generated") - Args: - handler_id (str): ID of the handler streaming the events - event_callback (Callable[[dict[str, Any]], Any]): Function to call when an event is received from the stream (optional, defaults to None) - sse (bool): Whether to enable server-sent events or not + async def get_workflow_handlers(self) -> list[Handler]: + response = await get_handlers(client=self._client) + if response: + return response.handlers + else: + raise ValueError("Response was not properly generated") - Returns: - None - """ - callback = event_callback or ( - lambda event: logger.info(f"Processing data: {event}") + async def get_workflow_result(self, handler: Handler) -> Any: + response = await get_results_handler_id( + handler_id=handler.handler_id, client=self._client ) - is_async = inspect.iscoroutinefunction(callback) - if sse: - async for event in self._stream_events_sse(handler_id): - if is_async: - await callback(event) # type: ignore - else: - callback(event) + if isinstance(response, Handler): + if response.status == HandlerStatus.COMPLETED: + return response.result + elif response.status == HandlerStatus.RUNNING: + return response.status.value + else: + return response.error + elif isinstance(response, str): + return response else: - async for event in self._stream_events_ndjson(handler_id): - if is_async: - await callback(event) # type: ignore - else: - callback(event) - return None + raise ValueError("Response was not properly generated") - async def send_event( + async def send_workflow_event( self, - handler_id: str, - event: Event | dict[str, Any] | str, - step: str | None = None, - ) -> bool: - """ - Send an event to the workflow. - - Args: - handler_id (str): ID of the handler of the running workflow to send the event to - event (Event | dict[str, Any] | str): Event to send, represented as an Event object, a dictionary or a serialized string. 
- step (str | None): Step to send the event to (optional, defaults to None) - - Returns: - bool: Success status of the send operation - """ + handler: Handler, + event: Union[Event, EventDict, str], + step: Optional[str] = None, + ) -> str: if isinstance(event, Event): try: - event = event.model_dump_json() + event = JsonSerializer().serialize(event) except Exception as e: - raise ValueError(f"Error while serializing the provided event: {e}") - elif isinstance(event, dict): + raise ValueError( + f"It was not possible to serialize the event you want to send because of: {e}" + ) + elif event is EventDict: + event.setdefault("__is_pydantic", True) try: event = json.dumps(event) except Exception as e: - raise ValueError(f"Error while serializing the provided event: {e}") - request_body = {"event": event} - if step: - request_body.update({"step": step}) - async with self._get_client() as client: - response = await client.post(f"/events/{handler_id}", json=request_body) - response.raise_for_status() - - return response.json()["status"] == "sent" - - async def get_result(self, handler_id: str) -> Any: - """ - Get the result of the workflow associated with the specified handler ID. - - Args: - handler_id (str): ID of the handler running the workflow - - Returns: - Any: Result of the workflow - """ - async with self._get_client() as client: - response = await client.get(f"/results/{handler_id}") - response.raise_for_status() - - if response.status_code == 202: - return - - return response.json()["result"] + raise ValueError( + f"It was not possible to serialize the event you want to send because of: {e}" + ) + if not step: + step: Unset = UNSET + response = await post_events_handler_id( + handler_id=handler.handler_id, + client=self._client, + body=PostEventsHandlerIdBody(event=cast(str, event), step=step), + ) + if isinstance(response, PostEventsHandlerIdResponse200): + return response.status.value + else: + raise ValueError("Response was not properly generated") diff --git a/src/workflows/client/utils.py b/src/workflows/client/utils.py new file mode 100644 index 0000000..983d575 --- /dev/null +++ b/src/workflows/client/utils.py @@ -0,0 +1,20 @@ +from pydantic import BaseModel, Field +from typing import TypedDict, Any, NotRequired + + +class AuthDetails(BaseModel): + token: str = Field(description="Authentication token") + prefix: str = Field( + description="Prefix in the authentication header (defaults to `Bearer`)", + default="Bearer", + ) + auth_header_name: str = Field( + description="Authentication header name (defaults to `Authentication`)", + default="Authentication", + ) + + +class EventDict(TypedDict): + __is_pydantic: NotRequired[bool] + qualified_name: str + value: dict[str, Any] diff --git a/src/workflows/openapi_generated_client/__init__.py b/src/workflows/openapi_generated_client/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/src/workflows/openapi_generated_client/workflows_api_client/__init__.py b/src/workflows/openapi_generated_client/workflows_api_client/__init__.py new file mode 100644 index 0000000..62e122c --- /dev/null +++ b/src/workflows/openapi_generated_client/workflows_api_client/__init__.py @@ -0,0 +1,8 @@ +"""A client library for accessing Workflows API""" + +from .client import AuthenticatedClient, Client + +__all__ = ( + "AuthenticatedClient", + "Client", +) diff --git a/src/workflows/openapi_generated_client/workflows_api_client/api/__init__.py b/src/workflows/openapi_generated_client/workflows_api_client/api/__init__.py new file mode 100644 
index 0000000..81f9fa2 --- /dev/null +++ b/src/workflows/openapi_generated_client/workflows_api_client/api/__init__.py @@ -0,0 +1 @@ +"""Contains methods for accessing the API""" diff --git a/src/workflows/openapi_generated_client/workflows_api_client/api/default/__init__.py b/src/workflows/openapi_generated_client/workflows_api_client/api/default/__init__.py new file mode 100644 index 0000000..2d7c0b2 --- /dev/null +++ b/src/workflows/openapi_generated_client/workflows_api_client/api/default/__init__.py @@ -0,0 +1 @@ +"""Contains endpoint functions for accessing the API""" diff --git a/src/workflows/openapi_generated_client/workflows_api_client/api/default/get_events_handler_id.py b/src/workflows/openapi_generated_client/workflows_api_client/api/default/get_events_handler_id.py new file mode 100644 index 0000000..e9509da --- /dev/null +++ b/src/workflows/openapi_generated_client/workflows_api_client/api/default/get_events_handler_id.py @@ -0,0 +1,216 @@ +from http import HTTPStatus +from typing import Any, Optional, Union, cast + +import httpx + +from ... import errors +from ...client import AuthenticatedClient, Client +from ...models.get_events_handler_id_response_200 import GetEventsHandlerIdResponse200 +from ...types import UNSET, Response, Unset + + +def _get_kwargs( + handler_id: str, + *, + sse: Union[Unset, bool] = True, +) -> dict[str, Any]: + params: dict[str, Any] = {} + + params["sse"] = sse + + params = {k: v for k, v in params.items() if v is not UNSET and v is not None} + + _kwargs: dict[str, Any] = { + "method": "get", + "url": f"/events/{handler_id}", + "params": params, + } + + return _kwargs + + +def _parse_response( + *, client: Union[AuthenticatedClient, Client], response: httpx.Response +) -> Optional[Union[Any, GetEventsHandlerIdResponse200]]: + if response.status_code == 200: + response_200 = GetEventsHandlerIdResponse200.from_dict(response.text) + + return response_200 + + if response.status_code == 404: + response_404 = cast(Any, None) + return response_404 + + if client.raise_on_unexpected_status: + raise errors.UnexpectedStatus(response.status_code, response.content) + else: + return None + + +def _build_response( + *, client: Union[AuthenticatedClient, Client], response: httpx.Response +) -> Response[Union[Any, GetEventsHandlerIdResponse200]]: + return Response( + status_code=HTTPStatus(response.status_code), + content=response.content, + headers=response.headers, + parsed=_parse_response(client=client, response=response), + ) + + +def sync_detailed( + handler_id: str, + *, + client: Union[AuthenticatedClient, Client], + sse: Union[Unset, bool] = True, +) -> Response[Union[Any, GetEventsHandlerIdResponse200]]: + r"""Stream workflow events + + Streams events produced by a workflow execution. Events are emitted as + newline-delimited JSON by default, or as Server-Sent Events when `sse=true`. + Event data is formatted according to llama-index's json serializer. For + pydantic serializable python types, it returns: + { + \"__is_pydantic\": True, + \"value\": , + \"qualified_name\": + } + + Args: + handler_id (str): + sse (Union[Unset, bool]): Default: True. + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. 
+ + Returns: + Response[Union[Any, GetEventsHandlerIdResponse200]] + """ + + kwargs = _get_kwargs( + handler_id=handler_id, + sse=sse, + ) + + response = client.get_httpx_client().request( + **kwargs, + ) + + return _build_response(client=client, response=response) + + +def sync( + handler_id: str, + *, + client: Union[AuthenticatedClient, Client], + sse: Union[Unset, bool] = True, +) -> Optional[Union[Any, GetEventsHandlerIdResponse200]]: + r"""Stream workflow events + + Streams events produced by a workflow execution. Events are emitted as + newline-delimited JSON by default, or as Server-Sent Events when `sse=true`. + Event data is formatted according to llama-index's json serializer. For + pydantic serializable python types, it returns: + { + \"__is_pydantic\": True, + \"value\": , + \"qualified_name\": + } + + Args: + handler_id (str): + sse (Union[Unset, bool]): Default: True. + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. + + Returns: + Union[Any, GetEventsHandlerIdResponse200] + """ + + return sync_detailed( + handler_id=handler_id, + client=client, + sse=sse, + ).parsed + + +async def asyncio_detailed( + handler_id: str, + *, + client: Union[AuthenticatedClient, Client], + sse: Union[Unset, bool] = True, +) -> Response[Union[Any, GetEventsHandlerIdResponse200]]: + r"""Stream workflow events + + Streams events produced by a workflow execution. Events are emitted as + newline-delimited JSON by default, or as Server-Sent Events when `sse=true`. + Event data is formatted according to llama-index's json serializer. For + pydantic serializable python types, it returns: + { + \"__is_pydantic\": True, + \"value\": , + \"qualified_name\": + } + + Args: + handler_id (str): + sse (Union[Unset, bool]): Default: True. + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. + + Returns: + Response[Union[Any, GetEventsHandlerIdResponse200]] + """ + + kwargs = _get_kwargs( + handler_id=handler_id, + sse=sse, + ) + + response = await client.get_async_httpx_client().request(**kwargs) + + return _build_response(client=client, response=response) + + +async def asyncio( + handler_id: str, + *, + client: Union[AuthenticatedClient, Client], + sse: Union[Unset, bool] = True, +) -> Optional[Union[Any, GetEventsHandlerIdResponse200]]: + r"""Stream workflow events + + Streams events produced by a workflow execution. Events are emitted as + newline-delimited JSON by default, or as Server-Sent Events when `sse=true`. + Event data is formatted according to llama-index's json serializer. For + pydantic serializable python types, it returns: + { + \"__is_pydantic\": True, + \"value\": , + \"qualified_name\": + } + + Args: + handler_id (str): + sse (Union[Unset, bool]): Default: True. + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. 
+ + Returns: + Union[Any, GetEventsHandlerIdResponse200] + """ + + return ( + await asyncio_detailed( + handler_id=handler_id, + client=client, + sse=sse, + ) + ).parsed diff --git a/src/workflows/openapi_generated_client/workflows_api_client/api/default/get_handlers.py b/src/workflows/openapi_generated_client/workflows_api_client/api/default/get_handlers.py new file mode 100644 index 0000000..2f3491b --- /dev/null +++ b/src/workflows/openapi_generated_client/workflows_api_client/api/default/get_handlers.py @@ -0,0 +1,131 @@ +from http import HTTPStatus +from typing import Any, Optional, Union + +import httpx + +from ... import errors +from ...client import AuthenticatedClient, Client +from ...models.handlers_list import HandlersList +from ...types import Response + + +def _get_kwargs() -> dict[str, Any]: + _kwargs: dict[str, Any] = { + "method": "get", + "url": "/handlers", + } + + return _kwargs + + +def _parse_response(*, client: Union[AuthenticatedClient, Client], response: httpx.Response) -> Optional[HandlersList]: + if response.status_code == 200: + response_200 = HandlersList.from_dict(response.json()) + + return response_200 + + if client.raise_on_unexpected_status: + raise errors.UnexpectedStatus(response.status_code, response.content) + else: + return None + + +def _build_response(*, client: Union[AuthenticatedClient, Client], response: httpx.Response) -> Response[HandlersList]: + return Response( + status_code=HTTPStatus(response.status_code), + content=response.content, + headers=response.headers, + parsed=_parse_response(client=client, response=response), + ) + + +def sync_detailed( + *, + client: Union[AuthenticatedClient, Client], +) -> Response[HandlersList]: + """Get handlers + + Returns all workflow handlers. + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. + + Returns: + Response[HandlersList] + """ + + kwargs = _get_kwargs() + + response = client.get_httpx_client().request( + **kwargs, + ) + + return _build_response(client=client, response=response) + + +def sync( + *, + client: Union[AuthenticatedClient, Client], +) -> Optional[HandlersList]: + """Get handlers + + Returns all workflow handlers. + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. + + Returns: + HandlersList + """ + + return sync_detailed( + client=client, + ).parsed + + +async def asyncio_detailed( + *, + client: Union[AuthenticatedClient, Client], +) -> Response[HandlersList]: + """Get handlers + + Returns all workflow handlers. + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. + + Returns: + Response[HandlersList] + """ + + kwargs = _get_kwargs() + + response = await client.get_async_httpx_client().request(**kwargs) + + return _build_response(client=client, response=response) + + +async def asyncio( + *, + client: Union[AuthenticatedClient, Client], +) -> Optional[HandlersList]: + """Get handlers + + Returns all workflow handlers. + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. 
+ httpx.TimeoutException: If the request takes longer than Client.timeout. + + Returns: + HandlersList + """ + + return ( + await asyncio_detailed( + client=client, + ) + ).parsed diff --git a/src/workflows/openapi_generated_client/workflows_api_client/api/default/get_health.py b/src/workflows/openapi_generated_client/workflows_api_client/api/default/get_health.py new file mode 100644 index 0000000..fabfb32 --- /dev/null +++ b/src/workflows/openapi_generated_client/workflows_api_client/api/default/get_health.py @@ -0,0 +1,135 @@ +from http import HTTPStatus +from typing import Any, Optional, Union + +import httpx + +from ... import errors +from ...client import AuthenticatedClient, Client +from ...models.get_health_response_200 import GetHealthResponse200 +from ...types import Response + + +def _get_kwargs() -> dict[str, Any]: + _kwargs: dict[str, Any] = { + "method": "get", + "url": "/health", + } + + return _kwargs + + +def _parse_response( + *, client: Union[AuthenticatedClient, Client], response: httpx.Response +) -> Optional[GetHealthResponse200]: + if response.status_code == 200: + response_200 = GetHealthResponse200.from_dict(response.json()) + + return response_200 + + if client.raise_on_unexpected_status: + raise errors.UnexpectedStatus(response.status_code, response.content) + else: + return None + + +def _build_response( + *, client: Union[AuthenticatedClient, Client], response: httpx.Response +) -> Response[GetHealthResponse200]: + return Response( + status_code=HTTPStatus(response.status_code), + content=response.content, + headers=response.headers, + parsed=_parse_response(client=client, response=response), + ) + + +def sync_detailed( + *, + client: Union[AuthenticatedClient, Client], +) -> Response[GetHealthResponse200]: + """Health check + + Returns the server health status. + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. + + Returns: + Response[GetHealthResponse200] + """ + + kwargs = _get_kwargs() + + response = client.get_httpx_client().request( + **kwargs, + ) + + return _build_response(client=client, response=response) + + +def sync( + *, + client: Union[AuthenticatedClient, Client], +) -> Optional[GetHealthResponse200]: + """Health check + + Returns the server health status. + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. + + Returns: + GetHealthResponse200 + """ + + return sync_detailed( + client=client, + ).parsed + + +async def asyncio_detailed( + *, + client: Union[AuthenticatedClient, Client], +) -> Response[GetHealthResponse200]: + """Health check + + Returns the server health status. + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. + + Returns: + Response[GetHealthResponse200] + """ + + kwargs = _get_kwargs() + + response = await client.get_async_httpx_client().request(**kwargs) + + return _build_response(client=client, response=response) + + +async def asyncio( + *, + client: Union[AuthenticatedClient, Client], +) -> Optional[GetHealthResponse200]: + """Health check + + Returns the server health status. 
+ + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. + + Returns: + GetHealthResponse200 + """ + + return ( + await asyncio_detailed( + client=client, + ) + ).parsed diff --git a/src/workflows/openapi_generated_client/workflows_api_client/api/default/get_results_handler_id.py b/src/workflows/openapi_generated_client/workflows_api_client/api/default/get_results_handler_id.py new file mode 100644 index 0000000..b1f9bff --- /dev/null +++ b/src/workflows/openapi_generated_client/workflows_api_client/api/default/get_results_handler_id.py @@ -0,0 +1,172 @@ +from http import HTTPStatus +from typing import Any, Optional, Union, cast + +import httpx + +from ... import errors +from ...client import AuthenticatedClient, Client +from ...models.handler import Handler +from ...types import Response + + +def _get_kwargs( + handler_id: str, +) -> dict[str, Any]: + _kwargs: dict[str, Any] = { + "method": "get", + "url": f"/results/{handler_id}", + } + + return _kwargs + + +def _parse_response( + *, client: Union[AuthenticatedClient, Client], response: httpx.Response +) -> Optional[Union[Any, Handler, str]]: + if response.status_code == 200: + response_200 = Handler.from_dict(response.json()) + + return response_200 + + if response.status_code == 202: + response_202 = Handler.from_dict(response.json()) + + return response_202 + + if response.status_code == 404: + response_404 = cast(Any, None) + return response_404 + + if response.status_code == 500: + response_500 = response.text + return response_500 + + if client.raise_on_unexpected_status: + raise errors.UnexpectedStatus(response.status_code, response.content) + else: + return None + + +def _build_response( + *, client: Union[AuthenticatedClient, Client], response: httpx.Response +) -> Response[Union[Any, Handler, str]]: + return Response( + status_code=HTTPStatus(response.status_code), + content=response.content, + headers=response.headers, + parsed=_parse_response(client=client, response=response), + ) + + +def sync_detailed( + handler_id: str, + *, + client: Union[AuthenticatedClient, Client], +) -> Response[Union[Any, Handler, str]]: + """Get workflow result + + Returns the final result of an asynchronously started workflow, if available + + Args: + handler_id (str): + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. + + Returns: + Response[Union[Any, Handler, str]] + """ + + kwargs = _get_kwargs( + handler_id=handler_id, + ) + + response = client.get_httpx_client().request( + **kwargs, + ) + + return _build_response(client=client, response=response) + + +def sync( + handler_id: str, + *, + client: Union[AuthenticatedClient, Client], +) -> Optional[Union[Any, Handler, str]]: + """Get workflow result + + Returns the final result of an asynchronously started workflow, if available + + Args: + handler_id (str): + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. 
+ + Returns: + Union[Any, Handler, str] + """ + + return sync_detailed( + handler_id=handler_id, + client=client, + ).parsed + + +async def asyncio_detailed( + handler_id: str, + *, + client: Union[AuthenticatedClient, Client], +) -> Response[Union[Any, Handler, str]]: + """Get workflow result + + Returns the final result of an asynchronously started workflow, if available + + Args: + handler_id (str): + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. + + Returns: + Response[Union[Any, Handler, str]] + """ + + kwargs = _get_kwargs( + handler_id=handler_id, + ) + + response = await client.get_async_httpx_client().request(**kwargs) + + return _build_response(client=client, response=response) + + +async def asyncio( + handler_id: str, + *, + client: Union[AuthenticatedClient, Client], +) -> Optional[Union[Any, Handler, str]]: + """Get workflow result + + Returns the final result of an asynchronously started workflow, if available + + Args: + handler_id (str): + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. + + Returns: + Union[Any, Handler, str] + """ + + return ( + await asyncio_detailed( + handler_id=handler_id, + client=client, + ) + ).parsed diff --git a/src/workflows/openapi_generated_client/workflows_api_client/api/default/get_workflows.py b/src/workflows/openapi_generated_client/workflows_api_client/api/default/get_workflows.py new file mode 100644 index 0000000..8d9fd1e --- /dev/null +++ b/src/workflows/openapi_generated_client/workflows_api_client/api/default/get_workflows.py @@ -0,0 +1,135 @@ +from http import HTTPStatus +from typing import Any, Optional, Union + +import httpx + +from ... import errors +from ...client import AuthenticatedClient, Client +from ...models.get_workflows_response_200 import GetWorkflowsResponse200 +from ...types import Response + + +def _get_kwargs() -> dict[str, Any]: + _kwargs: dict[str, Any] = { + "method": "get", + "url": "/workflows", + } + + return _kwargs + + +def _parse_response( + *, client: Union[AuthenticatedClient, Client], response: httpx.Response +) -> Optional[GetWorkflowsResponse200]: + if response.status_code == 200: + response_200 = GetWorkflowsResponse200.from_dict(response.json()) + + return response_200 + + if client.raise_on_unexpected_status: + raise errors.UnexpectedStatus(response.status_code, response.content) + else: + return None + + +def _build_response( + *, client: Union[AuthenticatedClient, Client], response: httpx.Response +) -> Response[GetWorkflowsResponse200]: + return Response( + status_code=HTTPStatus(response.status_code), + content=response.content, + headers=response.headers, + parsed=_parse_response(client=client, response=response), + ) + + +def sync_detailed( + *, + client: Union[AuthenticatedClient, Client], +) -> Response[GetWorkflowsResponse200]: + """List workflows + + Returns the list of registered workflow names. + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. 
+ + Returns: + Response[GetWorkflowsResponse200] + """ + + kwargs = _get_kwargs() + + response = client.get_httpx_client().request( + **kwargs, + ) + + return _build_response(client=client, response=response) + + +def sync( + *, + client: Union[AuthenticatedClient, Client], +) -> Optional[GetWorkflowsResponse200]: + """List workflows + + Returns the list of registered workflow names. + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. + + Returns: + GetWorkflowsResponse200 + """ + + return sync_detailed( + client=client, + ).parsed + + +async def asyncio_detailed( + *, + client: Union[AuthenticatedClient, Client], +) -> Response[GetWorkflowsResponse200]: + """List workflows + + Returns the list of registered workflow names. + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. + + Returns: + Response[GetWorkflowsResponse200] + """ + + kwargs = _get_kwargs() + + response = await client.get_async_httpx_client().request(**kwargs) + + return _build_response(client=client, response=response) + + +async def asyncio( + *, + client: Union[AuthenticatedClient, Client], +) -> Optional[GetWorkflowsResponse200]: + """List workflows + + Returns the list of registered workflow names. + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. + + Returns: + GetWorkflowsResponse200 + """ + + return ( + await asyncio_detailed( + client=client, + ) + ).parsed diff --git a/src/workflows/openapi_generated_client/workflows_api_client/api/default/get_workflows_name_representation.py b/src/workflows/openapi_generated_client/workflows_api_client/api/default/get_workflows_name_representation.py new file mode 100644 index 0000000..c8f649f --- /dev/null +++ b/src/workflows/openapi_generated_client/workflows_api_client/api/default/get_workflows_name_representation.py @@ -0,0 +1,167 @@ +from http import HTTPStatus +from typing import Any, Optional, Union, cast + +import httpx + +from ... 
import errors +from ...client import AuthenticatedClient, Client +from ...models.get_workflows_name_representation_response_200 import GetWorkflowsNameRepresentationResponse200 +from ...types import Response + + +def _get_kwargs( + name: str, +) -> dict[str, Any]: + _kwargs: dict[str, Any] = { + "method": "get", + "url": f"/workflows/{name}/representation", + } + + return _kwargs + + +def _parse_response( + *, client: Union[AuthenticatedClient, Client], response: httpx.Response +) -> Optional[Union[Any, GetWorkflowsNameRepresentationResponse200]]: + if response.status_code == 200: + response_200 = GetWorkflowsNameRepresentationResponse200.from_dict(response.json()) + + return response_200 + + if response.status_code == 404: + response_404 = cast(Any, None) + return response_404 + + if response.status_code == 500: + response_500 = cast(Any, None) + return response_500 + + if client.raise_on_unexpected_status: + raise errors.UnexpectedStatus(response.status_code, response.content) + else: + return None + + +def _build_response( + *, client: Union[AuthenticatedClient, Client], response: httpx.Response +) -> Response[Union[Any, GetWorkflowsNameRepresentationResponse200]]: + return Response( + status_code=HTTPStatus(response.status_code), + content=response.content, + headers=response.headers, + parsed=_parse_response(client=client, response=response), + ) + + +def sync_detailed( + name: str, + *, + client: Union[AuthenticatedClient, Client], +) -> Response[Union[Any, GetWorkflowsNameRepresentationResponse200]]: + """Get the representation of the workflow + + Get the representation of the workflow as a directed graph in JSON format + + Args: + name (str): + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. + + Returns: + Response[Union[Any, GetWorkflowsNameRepresentationResponse200]] + """ + + kwargs = _get_kwargs( + name=name, + ) + + response = client.get_httpx_client().request( + **kwargs, + ) + + return _build_response(client=client, response=response) + + +def sync( + name: str, + *, + client: Union[AuthenticatedClient, Client], +) -> Optional[Union[Any, GetWorkflowsNameRepresentationResponse200]]: + """Get the representation of the workflow + + Get the representation of the workflow as a directed graph in JSON format + + Args: + name (str): + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. + + Returns: + Union[Any, GetWorkflowsNameRepresentationResponse200] + """ + + return sync_detailed( + name=name, + client=client, + ).parsed + + +async def asyncio_detailed( + name: str, + *, + client: Union[AuthenticatedClient, Client], +) -> Response[Union[Any, GetWorkflowsNameRepresentationResponse200]]: + """Get the representation of the workflow + + Get the representation of the workflow as a directed graph in JSON format + + Args: + name (str): + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. 
+ + Returns: + Response[Union[Any, GetWorkflowsNameRepresentationResponse200]] + """ + + kwargs = _get_kwargs( + name=name, + ) + + response = await client.get_async_httpx_client().request(**kwargs) + + return _build_response(client=client, response=response) + + +async def asyncio( + name: str, + *, + client: Union[AuthenticatedClient, Client], +) -> Optional[Union[Any, GetWorkflowsNameRepresentationResponse200]]: + """Get the representation of the workflow + + Get the representation of the workflow as a directed graph in JSON format + + Args: + name (str): + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. + + Returns: + Union[Any, GetWorkflowsNameRepresentationResponse200] + """ + + return ( + await asyncio_detailed( + name=name, + client=client, + ) + ).parsed diff --git a/src/workflows/openapi_generated_client/workflows_api_client/api/default/get_workflows_name_schema.py b/src/workflows/openapi_generated_client/workflows_api_client/api/default/get_workflows_name_schema.py new file mode 100644 index 0000000..0cbb6c5 --- /dev/null +++ b/src/workflows/openapi_generated_client/workflows_api_client/api/default/get_workflows_name_schema.py @@ -0,0 +1,171 @@ +from http import HTTPStatus +from typing import Any, Optional, Union, cast + +import httpx + +from ... import errors +from ...client import AuthenticatedClient, Client +from ...models.get_workflows_name_schema_response_200 import GetWorkflowsNameSchemaResponse200 +from ...types import Response + + +def _get_kwargs( + name: str, +) -> dict[str, Any]: + _kwargs: dict[str, Any] = { + "method": "get", + "url": f"/workflows/{name}/schema", + } + + return _kwargs + + +def _parse_response( + *, client: Union[AuthenticatedClient, Client], response: httpx.Response +) -> Optional[Union[Any, GetWorkflowsNameSchemaResponse200]]: + if response.status_code == 200: + response_200 = GetWorkflowsNameSchemaResponse200.from_dict(response.json()) + + return response_200 + + if response.status_code == 404: + response_404 = cast(Any, None) + return response_404 + + if response.status_code == 500: + response_500 = cast(Any, None) + return response_500 + + if client.raise_on_unexpected_status: + raise errors.UnexpectedStatus(response.status_code, response.content) + else: + return None + + +def _build_response( + *, client: Union[AuthenticatedClient, Client], response: httpx.Response +) -> Response[Union[Any, GetWorkflowsNameSchemaResponse200]]: + return Response( + status_code=HTTPStatus(response.status_code), + content=response.content, + headers=response.headers, + parsed=_parse_response(client=client, response=response), + ) + + +def sync_detailed( + name: str, + *, + client: Union[AuthenticatedClient, Client], +) -> Response[Union[Any, GetWorkflowsNameSchemaResponse200]]: + r"""Get JSON schema for start event + + Gets the JSON schema of the start and stop events from the specified workflow and returns it under + \"start\" (start event) and \"stop\" (stop event) + + Args: + name (str): + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. 
+ + Returns: + Response[Union[Any, GetWorkflowsNameSchemaResponse200]] + """ + + kwargs = _get_kwargs( + name=name, + ) + + response = client.get_httpx_client().request( + **kwargs, + ) + + return _build_response(client=client, response=response) + + +def sync( + name: str, + *, + client: Union[AuthenticatedClient, Client], +) -> Optional[Union[Any, GetWorkflowsNameSchemaResponse200]]: + r"""Get JSON schema for start event + + Gets the JSON schema of the start and stop events from the specified workflow and returns it under + \"start\" (start event) and \"stop\" (stop event) + + Args: + name (str): + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. + + Returns: + Union[Any, GetWorkflowsNameSchemaResponse200] + """ + + return sync_detailed( + name=name, + client=client, + ).parsed + + +async def asyncio_detailed( + name: str, + *, + client: Union[AuthenticatedClient, Client], +) -> Response[Union[Any, GetWorkflowsNameSchemaResponse200]]: + r"""Get JSON schema for start event + + Gets the JSON schema of the start and stop events from the specified workflow and returns it under + \"start\" (start event) and \"stop\" (stop event) + + Args: + name (str): + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. + + Returns: + Response[Union[Any, GetWorkflowsNameSchemaResponse200]] + """ + + kwargs = _get_kwargs( + name=name, + ) + + response = await client.get_async_httpx_client().request(**kwargs) + + return _build_response(client=client, response=response) + + +async def asyncio( + name: str, + *, + client: Union[AuthenticatedClient, Client], +) -> Optional[Union[Any, GetWorkflowsNameSchemaResponse200]]: + r"""Get JSON schema for start event + + Gets the JSON schema of the start and stop events from the specified workflow and returns it under + \"start\" (start event) and \"stop\" (stop event) + + Args: + name (str): + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. + + Returns: + Union[Any, GetWorkflowsNameSchemaResponse200] + """ + + return ( + await asyncio_detailed( + name=name, + client=client, + ) + ).parsed diff --git a/src/workflows/openapi_generated_client/workflows_api_client/api/default/post_events_handler_id.py b/src/workflows/openapi_generated_client/workflows_api_client/api/default/post_events_handler_id.py new file mode 100644 index 0000000..9e667e1 --- /dev/null +++ b/src/workflows/openapi_generated_client/workflows_api_client/api/default/post_events_handler_id.py @@ -0,0 +1,193 @@ +from http import HTTPStatus +from typing import Any, Optional, Union, cast + +import httpx + +from ... 
import errors +from ...client import AuthenticatedClient, Client +from ...models.post_events_handler_id_body import PostEventsHandlerIdBody +from ...models.post_events_handler_id_response_200 import PostEventsHandlerIdResponse200 +from ...types import Response + + +def _get_kwargs( + handler_id: str, + *, + body: PostEventsHandlerIdBody, +) -> dict[str, Any]: + headers: dict[str, Any] = {} + + _kwargs: dict[str, Any] = { + "method": "post", + "url": f"/events/{handler_id}", + } + + _kwargs["json"] = body.to_dict() + + headers["Content-Type"] = "application/json" + + _kwargs["headers"] = headers + return _kwargs + + +def _parse_response( + *, client: Union[AuthenticatedClient, Client], response: httpx.Response +) -> Optional[Union[Any, PostEventsHandlerIdResponse200]]: + if response.status_code == 200: + response_200 = PostEventsHandlerIdResponse200.from_dict(response.json()) + + return response_200 + + if response.status_code == 400: + response_400 = cast(Any, None) + return response_400 + + if response.status_code == 404: + response_404 = cast(Any, None) + return response_404 + + if response.status_code == 409: + response_409 = cast(Any, None) + return response_409 + + if client.raise_on_unexpected_status: + raise errors.UnexpectedStatus(response.status_code, response.content) + else: + return None + + +def _build_response( + *, client: Union[AuthenticatedClient, Client], response: httpx.Response +) -> Response[Union[Any, PostEventsHandlerIdResponse200]]: + return Response( + status_code=HTTPStatus(response.status_code), + content=response.content, + headers=response.headers, + parsed=_parse_response(client=client, response=response), + ) + + +def sync_detailed( + handler_id: str, + *, + client: Union[AuthenticatedClient, Client], + body: PostEventsHandlerIdBody, +) -> Response[Union[Any, PostEventsHandlerIdResponse200]]: + """Send event to workflow + + Sends an event to a running workflow's context. + + Args: + handler_id (str): + body (PostEventsHandlerIdBody): + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. + + Returns: + Response[Union[Any, PostEventsHandlerIdResponse200]] + """ + + kwargs = _get_kwargs( + handler_id=handler_id, + body=body, + ) + + response = client.get_httpx_client().request( + **kwargs, + ) + + return _build_response(client=client, response=response) + + +def sync( + handler_id: str, + *, + client: Union[AuthenticatedClient, Client], + body: PostEventsHandlerIdBody, +) -> Optional[Union[Any, PostEventsHandlerIdResponse200]]: + """Send event to workflow + + Sends an event to a running workflow's context. + + Args: + handler_id (str): + body (PostEventsHandlerIdBody): + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. + + Returns: + Union[Any, PostEventsHandlerIdResponse200] + """ + + return sync_detailed( + handler_id=handler_id, + client=client, + body=body, + ).parsed + + +async def asyncio_detailed( + handler_id: str, + *, + client: Union[AuthenticatedClient, Client], + body: PostEventsHandlerIdBody, +) -> Response[Union[Any, PostEventsHandlerIdResponse200]]: + """Send event to workflow + + Sends an event to a running workflow's context. 
+ + Args: + handler_id (str): + body (PostEventsHandlerIdBody): + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. + + Returns: + Response[Union[Any, PostEventsHandlerIdResponse200]] + """ + + kwargs = _get_kwargs( + handler_id=handler_id, + body=body, + ) + + response = await client.get_async_httpx_client().request(**kwargs) + + return _build_response(client=client, response=response) + + +async def asyncio( + handler_id: str, + *, + client: Union[AuthenticatedClient, Client], + body: PostEventsHandlerIdBody, +) -> Optional[Union[Any, PostEventsHandlerIdResponse200]]: + """Send event to workflow + + Sends an event to a running workflow's context. + + Args: + handler_id (str): + body (PostEventsHandlerIdBody): + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. + + Returns: + Union[Any, PostEventsHandlerIdResponse200] + """ + + return ( + await asyncio_detailed( + handler_id=handler_id, + client=client, + body=body, + ) + ).parsed diff --git a/src/workflows/openapi_generated_client/workflows_api_client/api/default/post_workflows_name_run.py b/src/workflows/openapi_generated_client/workflows_api_client/api/default/post_workflows_name_run.py new file mode 100644 index 0000000..1b2b7cb --- /dev/null +++ b/src/workflows/openapi_generated_client/workflows_api_client/api/default/post_workflows_name_run.py @@ -0,0 +1,201 @@ +from http import HTTPStatus +from typing import Any, Optional, Union, cast + +import httpx + +from ... import errors +from ...client import AuthenticatedClient, Client +from ...models.handler import Handler +from ...models.post_workflows_name_run_body import PostWorkflowsNameRunBody +from ...types import Response + + +def _get_kwargs( + name: str, + *, + body: PostWorkflowsNameRunBody, +) -> dict[str, Any]: + headers: dict[str, Any] = {} + + _kwargs: dict[str, Any] = { + "method": "post", + "url": f"/workflows/{name}/run", + } + + _kwargs["json"] = body.to_dict() + + headers["Content-Type"] = "application/json" + + _kwargs["headers"] = headers + return _kwargs + + +def _parse_response( + *, client: Union[AuthenticatedClient, Client], response: httpx.Response +) -> Optional[Union[Any, Handler]]: + if response.status_code == 200: + response_200 = Handler.from_dict(response.json()) + + return response_200 + + if response.status_code == 400: + response_400 = cast(Any, None) + return response_400 + + if response.status_code == 404: + response_404 = cast(Any, None) + return response_404 + + if response.status_code == 500: + response_500 = cast(Any, None) + return response_500 + + if client.raise_on_unexpected_status: + raise errors.UnexpectedStatus(response.status_code, response.content) + else: + return None + + +def _build_response( + *, client: Union[AuthenticatedClient, Client], response: httpx.Response +) -> Response[Union[Any, Handler]]: + return Response( + status_code=HTTPStatus(response.status_code), + content=response.content, + headers=response.headers, + parsed=_parse_response(client=client, response=response), + ) + + +def sync_detailed( + name: str, + *, + client: Union[AuthenticatedClient, Client], + body: PostWorkflowsNameRunBody, +) -> Response[Union[Any, Handler]]: + """Run workflow (wait) + + Runs the specified workflow synchronously and returns the 
final result. + The request body may include an optional serialized start event, an optional + context object, and optional keyword arguments passed to the workflow run. + + Args: + name (str): + body (PostWorkflowsNameRunBody): + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. + + Returns: + Response[Union[Any, Handler]] + """ + + kwargs = _get_kwargs( + name=name, + body=body, + ) + + response = client.get_httpx_client().request( + **kwargs, + ) + + return _build_response(client=client, response=response) + + +def sync( + name: str, + *, + client: Union[AuthenticatedClient, Client], + body: PostWorkflowsNameRunBody, +) -> Optional[Union[Any, Handler]]: + """Run workflow (wait) + + Runs the specified workflow synchronously and returns the final result. + The request body may include an optional serialized start event, an optional + context object, and optional keyword arguments passed to the workflow run. + + Args: + name (str): + body (PostWorkflowsNameRunBody): + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. + + Returns: + Union[Any, Handler] + """ + + return sync_detailed( + name=name, + client=client, + body=body, + ).parsed + + +async def asyncio_detailed( + name: str, + *, + client: Union[AuthenticatedClient, Client], + body: PostWorkflowsNameRunBody, +) -> Response[Union[Any, Handler]]: + """Run workflow (wait) + + Runs the specified workflow synchronously and returns the final result. + The request body may include an optional serialized start event, an optional + context object, and optional keyword arguments passed to the workflow run. + + Args: + name (str): + body (PostWorkflowsNameRunBody): + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. + + Returns: + Response[Union[Any, Handler]] + """ + + kwargs = _get_kwargs( + name=name, + body=body, + ) + + response = await client.get_async_httpx_client().request(**kwargs) + + return _build_response(client=client, response=response) + + +async def asyncio( + name: str, + *, + client: Union[AuthenticatedClient, Client], + body: PostWorkflowsNameRunBody, +) -> Optional[Union[Any, Handler]]: + """Run workflow (wait) + + Runs the specified workflow synchronously and returns the final result. + The request body may include an optional serialized start event, an optional + context object, and optional keyword arguments passed to the workflow run. + + Args: + name (str): + body (PostWorkflowsNameRunBody): + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. 
+ + Returns: + Union[Any, Handler] + """ + + return ( + await asyncio_detailed( + name=name, + client=client, + body=body, + ) + ).parsed diff --git a/src/workflows/openapi_generated_client/workflows_api_client/api/default/post_workflows_name_run_nowait.py b/src/workflows/openapi_generated_client/workflows_api_client/api/default/post_workflows_name_run_nowait.py new file mode 100644 index 0000000..191ad29 --- /dev/null +++ b/src/workflows/openapi_generated_client/workflows_api_client/api/default/post_workflows_name_run_nowait.py @@ -0,0 +1,193 @@ +from http import HTTPStatus +from typing import Any, Optional, Union, cast + +import httpx + +from ... import errors +from ...client import AuthenticatedClient, Client +from ...models.handler import Handler +from ...models.post_workflows_name_run_nowait_body import PostWorkflowsNameRunNowaitBody +from ...types import Response + + +def _get_kwargs( + name: str, + *, + body: PostWorkflowsNameRunNowaitBody, +) -> dict[str, Any]: + headers: dict[str, Any] = {} + + _kwargs: dict[str, Any] = { + "method": "post", + "url": f"/workflows/{name}/run-nowait", + } + + _kwargs["json"] = body.to_dict() + + headers["Content-Type"] = "application/json" + + _kwargs["headers"] = headers + return _kwargs + + +def _parse_response( + *, client: Union[AuthenticatedClient, Client], response: httpx.Response +) -> Optional[Union[Any, Handler]]: + if response.status_code == 200: + response_200 = Handler.from_dict(response.json()) + + return response_200 + + if response.status_code == 400: + response_400 = cast(Any, None) + return response_400 + + if response.status_code == 404: + response_404 = cast(Any, None) + return response_404 + + if client.raise_on_unexpected_status: + raise errors.UnexpectedStatus(response.status_code, response.content) + else: + return None + + +def _build_response( + *, client: Union[AuthenticatedClient, Client], response: httpx.Response +) -> Response[Union[Any, Handler]]: + return Response( + status_code=HTTPStatus(response.status_code), + content=response.content, + headers=response.headers, + parsed=_parse_response(client=client, response=response), + ) + + +def sync_detailed( + name: str, + *, + client: Union[AuthenticatedClient, Client], + body: PostWorkflowsNameRunNowaitBody, +) -> Response[Union[Any, Handler]]: + """Run workflow (no-wait) + + Starts the specified workflow asynchronously and returns a handler identifier + which can be used to query results or stream events. + + Args: + name (str): + body (PostWorkflowsNameRunNowaitBody): + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. + + Returns: + Response[Union[Any, Handler]] + """ + + kwargs = _get_kwargs( + name=name, + body=body, + ) + + response = client.get_httpx_client().request( + **kwargs, + ) + + return _build_response(client=client, response=response) + + +def sync( + name: str, + *, + client: Union[AuthenticatedClient, Client], + body: PostWorkflowsNameRunNowaitBody, +) -> Optional[Union[Any, Handler]]: + """Run workflow (no-wait) + + Starts the specified workflow asynchronously and returns a handler identifier + which can be used to query results or stream events. + + Args: + name (str): + body (PostWorkflowsNameRunNowaitBody): + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. 
+ httpx.TimeoutException: If the request takes longer than Client.timeout. + + Returns: + Union[Any, Handler] + """ + + return sync_detailed( + name=name, + client=client, + body=body, + ).parsed + + +async def asyncio_detailed( + name: str, + *, + client: Union[AuthenticatedClient, Client], + body: PostWorkflowsNameRunNowaitBody, +) -> Response[Union[Any, Handler]]: + """Run workflow (no-wait) + + Starts the specified workflow asynchronously and returns a handler identifier + which can be used to query results or stream events. + + Args: + name (str): + body (PostWorkflowsNameRunNowaitBody): + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. + + Returns: + Response[Union[Any, Handler]] + """ + + kwargs = _get_kwargs( + name=name, + body=body, + ) + + response = await client.get_async_httpx_client().request(**kwargs) + + return _build_response(client=client, response=response) + + +async def asyncio( + name: str, + *, + client: Union[AuthenticatedClient, Client], + body: PostWorkflowsNameRunNowaitBody, +) -> Optional[Union[Any, Handler]]: + """Run workflow (no-wait) + + Starts the specified workflow asynchronously and returns a handler identifier + which can be used to query results or stream events. + + Args: + name (str): + body (PostWorkflowsNameRunNowaitBody): + + Raises: + errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. + httpx.TimeoutException: If the request takes longer than Client.timeout. + + Returns: + Union[Any, Handler] + """ + + return ( + await asyncio_detailed( + name=name, + client=client, + body=body, + ) + ).parsed diff --git a/src/workflows/openapi_generated_client/workflows_api_client/client.py b/src/workflows/openapi_generated_client/workflows_api_client/client.py new file mode 100644 index 0000000..e80446f --- /dev/null +++ b/src/workflows/openapi_generated_client/workflows_api_client/client.py @@ -0,0 +1,268 @@ +import ssl +from typing import Any, Optional, Union + +import httpx +from attrs import define, evolve, field + + +@define +class Client: + """A class for keeping track of data related to the API + + The following are accepted as keyword arguments and will be used to construct httpx Clients internally: + + ``base_url``: The base URL for the API, all requests are made to a relative path to this URL + + ``cookies``: A dictionary of cookies to be sent with every request + + ``headers``: A dictionary of headers to be sent with every request + + ``timeout``: The maximum amount of a time a request can take. API functions will raise + httpx.TimeoutException if this is exceeded. + + ``verify_ssl``: Whether or not to verify the SSL certificate of the API server. This should be True in production, + but can be set to False for testing purposes. + + ``follow_redirects``: Whether or not to follow redirects. Default value is False. + + ``httpx_args``: A dictionary of additional arguments to be passed to the ``httpx.Client`` and ``httpx.AsyncClient`` constructor. + + + Attributes: + raise_on_unexpected_status: Whether or not to raise an errors.UnexpectedStatus if the API returns a + status code that was not documented in the source OpenAPI document. Can also be provided as a keyword + argument to the constructor. 
+ """ + + raise_on_unexpected_status: bool = field(default=False, kw_only=True) + _base_url: str = field(alias="base_url") + _cookies: dict[str, str] = field(factory=dict, kw_only=True, alias="cookies") + _headers: dict[str, str] = field(factory=dict, kw_only=True, alias="headers") + _timeout: Optional[httpx.Timeout] = field(default=None, kw_only=True, alias="timeout") + _verify_ssl: Union[str, bool, ssl.SSLContext] = field(default=True, kw_only=True, alias="verify_ssl") + _follow_redirects: bool = field(default=False, kw_only=True, alias="follow_redirects") + _httpx_args: dict[str, Any] = field(factory=dict, kw_only=True, alias="httpx_args") + _client: Optional[httpx.Client] = field(default=None, init=False) + _async_client: Optional[httpx.AsyncClient] = field(default=None, init=False) + + def with_headers(self, headers: dict[str, str]) -> "Client": + """Get a new client matching this one with additional headers""" + if self._client is not None: + self._client.headers.update(headers) + if self._async_client is not None: + self._async_client.headers.update(headers) + return evolve(self, headers={**self._headers, **headers}) + + def with_cookies(self, cookies: dict[str, str]) -> "Client": + """Get a new client matching this one with additional cookies""" + if self._client is not None: + self._client.cookies.update(cookies) + if self._async_client is not None: + self._async_client.cookies.update(cookies) + return evolve(self, cookies={**self._cookies, **cookies}) + + def with_timeout(self, timeout: httpx.Timeout) -> "Client": + """Get a new client matching this one with a new timeout (in seconds)""" + if self._client is not None: + self._client.timeout = timeout + if self._async_client is not None: + self._async_client.timeout = timeout + return evolve(self, timeout=timeout) + + def set_httpx_client(self, client: httpx.Client) -> "Client": + """Manually set the underlying httpx.Client + + **NOTE**: This will override any other settings on the client, including cookies, headers, and timeout. + """ + self._client = client + return self + + def get_httpx_client(self) -> httpx.Client: + """Get the underlying httpx.Client, constructing a new one if not previously set""" + if self._client is None: + self._client = httpx.Client( + base_url=self._base_url, + cookies=self._cookies, + headers=self._headers, + timeout=self._timeout, + verify=self._verify_ssl, + follow_redirects=self._follow_redirects, + **self._httpx_args, + ) + return self._client + + def __enter__(self) -> "Client": + """Enter a context manager for self.client—you cannot enter twice (see httpx docs)""" + self.get_httpx_client().__enter__() + return self + + def __exit__(self, *args: Any, **kwargs: Any) -> None: + """Exit a context manager for internal httpx.Client (see httpx docs)""" + self.get_httpx_client().__exit__(*args, **kwargs) + + def set_async_httpx_client(self, async_client: httpx.AsyncClient) -> "Client": + """Manually the underlying httpx.AsyncClient + + **NOTE**: This will override any other settings on the client, including cookies, headers, and timeout. 
+ """ + self._async_client = async_client + return self + + def get_async_httpx_client(self) -> httpx.AsyncClient: + """Get the underlying httpx.AsyncClient, constructing a new one if not previously set""" + if self._async_client is None: + self._async_client = httpx.AsyncClient( + base_url=self._base_url, + cookies=self._cookies, + headers=self._headers, + timeout=self._timeout, + verify=self._verify_ssl, + follow_redirects=self._follow_redirects, + **self._httpx_args, + ) + return self._async_client + + async def __aenter__(self) -> "Client": + """Enter a context manager for underlying httpx.AsyncClient—you cannot enter twice (see httpx docs)""" + await self.get_async_httpx_client().__aenter__() + return self + + async def __aexit__(self, *args: Any, **kwargs: Any) -> None: + """Exit a context manager for underlying httpx.AsyncClient (see httpx docs)""" + await self.get_async_httpx_client().__aexit__(*args, **kwargs) + + +@define +class AuthenticatedClient: + """A Client which has been authenticated for use on secured endpoints + + The following are accepted as keyword arguments and will be used to construct httpx Clients internally: + + ``base_url``: The base URL for the API, all requests are made to a relative path to this URL + + ``cookies``: A dictionary of cookies to be sent with every request + + ``headers``: A dictionary of headers to be sent with every request + + ``timeout``: The maximum amount of a time a request can take. API functions will raise + httpx.TimeoutException if this is exceeded. + + ``verify_ssl``: Whether or not to verify the SSL certificate of the API server. This should be True in production, + but can be set to False for testing purposes. + + ``follow_redirects``: Whether or not to follow redirects. Default value is False. + + ``httpx_args``: A dictionary of additional arguments to be passed to the ``httpx.Client`` and ``httpx.AsyncClient`` constructor. + + + Attributes: + raise_on_unexpected_status: Whether or not to raise an errors.UnexpectedStatus if the API returns a + status code that was not documented in the source OpenAPI document. Can also be provided as a keyword + argument to the constructor. 
+ token: The token to use for authentication + prefix: The prefix to use for the Authorization header + auth_header_name: The name of the Authorization header + """ + + raise_on_unexpected_status: bool = field(default=False, kw_only=True) + _base_url: str = field(alias="base_url") + _cookies: dict[str, str] = field(factory=dict, kw_only=True, alias="cookies") + _headers: dict[str, str] = field(factory=dict, kw_only=True, alias="headers") + _timeout: Optional[httpx.Timeout] = field(default=None, kw_only=True, alias="timeout") + _verify_ssl: Union[str, bool, ssl.SSLContext] = field(default=True, kw_only=True, alias="verify_ssl") + _follow_redirects: bool = field(default=False, kw_only=True, alias="follow_redirects") + _httpx_args: dict[str, Any] = field(factory=dict, kw_only=True, alias="httpx_args") + _client: Optional[httpx.Client] = field(default=None, init=False) + _async_client: Optional[httpx.AsyncClient] = field(default=None, init=False) + + token: str + prefix: str = "Bearer" + auth_header_name: str = "Authorization" + + def with_headers(self, headers: dict[str, str]) -> "AuthenticatedClient": + """Get a new client matching this one with additional headers""" + if self._client is not None: + self._client.headers.update(headers) + if self._async_client is not None: + self._async_client.headers.update(headers) + return evolve(self, headers={**self._headers, **headers}) + + def with_cookies(self, cookies: dict[str, str]) -> "AuthenticatedClient": + """Get a new client matching this one with additional cookies""" + if self._client is not None: + self._client.cookies.update(cookies) + if self._async_client is not None: + self._async_client.cookies.update(cookies) + return evolve(self, cookies={**self._cookies, **cookies}) + + def with_timeout(self, timeout: httpx.Timeout) -> "AuthenticatedClient": + """Get a new client matching this one with a new timeout (in seconds)""" + if self._client is not None: + self._client.timeout = timeout + if self._async_client is not None: + self._async_client.timeout = timeout + return evolve(self, timeout=timeout) + + def set_httpx_client(self, client: httpx.Client) -> "AuthenticatedClient": + """Manually set the underlying httpx.Client + + **NOTE**: This will override any other settings on the client, including cookies, headers, and timeout. + """ + self._client = client + return self + + def get_httpx_client(self) -> httpx.Client: + """Get the underlying httpx.Client, constructing a new one if not previously set""" + if self._client is None: + self._headers[self.auth_header_name] = f"{self.prefix} {self.token}" if self.prefix else self.token + self._client = httpx.Client( + base_url=self._base_url, + cookies=self._cookies, + headers=self._headers, + timeout=self._timeout, + verify=self._verify_ssl, + follow_redirects=self._follow_redirects, + **self._httpx_args, + ) + return self._client + + def __enter__(self) -> "AuthenticatedClient": + """Enter a context manager for self.client—you cannot enter twice (see httpx docs)""" + self.get_httpx_client().__enter__() + return self + + def __exit__(self, *args: Any, **kwargs: Any) -> None: + """Exit a context manager for internal httpx.Client (see httpx docs)""" + self.get_httpx_client().__exit__(*args, **kwargs) + + def set_async_httpx_client(self, async_client: httpx.AsyncClient) -> "AuthenticatedClient": + """Manually the underlying httpx.AsyncClient + + **NOTE**: This will override any other settings on the client, including cookies, headers, and timeout. 
+ """ + self._async_client = async_client + return self + + def get_async_httpx_client(self) -> httpx.AsyncClient: + """Get the underlying httpx.AsyncClient, constructing a new one if not previously set""" + if self._async_client is None: + self._headers[self.auth_header_name] = f"{self.prefix} {self.token}" if self.prefix else self.token + self._async_client = httpx.AsyncClient( + base_url=self._base_url, + cookies=self._cookies, + headers=self._headers, + timeout=self._timeout, + verify=self._verify_ssl, + follow_redirects=self._follow_redirects, + **self._httpx_args, + ) + return self._async_client + + async def __aenter__(self) -> "AuthenticatedClient": + """Enter a context manager for underlying httpx.AsyncClient—you cannot enter twice (see httpx docs)""" + await self.get_async_httpx_client().__aenter__() + return self + + async def __aexit__(self, *args: Any, **kwargs: Any) -> None: + """Exit a context manager for underlying httpx.AsyncClient (see httpx docs)""" + await self.get_async_httpx_client().__aexit__(*args, **kwargs) diff --git a/src/workflows/openapi_generated_client/workflows_api_client/errors.py b/src/workflows/openapi_generated_client/workflows_api_client/errors.py new file mode 100644 index 0000000..5f92e76 --- /dev/null +++ b/src/workflows/openapi_generated_client/workflows_api_client/errors.py @@ -0,0 +1,16 @@ +"""Contains shared errors types that can be raised from API functions""" + + +class UnexpectedStatus(Exception): + """Raised by api functions when the response status an undocumented status and Client.raise_on_unexpected_status is True""" + + def __init__(self, status_code: int, content: bytes): + self.status_code = status_code + self.content = content + + super().__init__( + f"Unexpected status code: {status_code}\n\nResponse content:\n{content.decode(errors='ignore')}" + ) + + +__all__ = ["UnexpectedStatus"] diff --git a/src/workflows/openapi_generated_client/workflows_api_client/models/__init__.py b/src/workflows/openapi_generated_client/workflows_api_client/models/__init__.py new file mode 100644 index 0000000..ce534b3 --- /dev/null +++ b/src/workflows/openapi_generated_client/workflows_api_client/models/__init__.py @@ -0,0 +1,45 @@ +"""Contains all the data models used in inputs/outputs""" + +from .get_events_handler_id_response_200 import GetEventsHandlerIdResponse200 +from .get_events_handler_id_response_200_value import GetEventsHandlerIdResponse200Value +from .get_health_response_200 import GetHealthResponse200 +from .get_workflows_name_representation_response_200 import GetWorkflowsNameRepresentationResponse200 +from .get_workflows_name_schema_response_200 import GetWorkflowsNameSchemaResponse200 +from .get_workflows_response_200 import GetWorkflowsResponse200 +from .handler import Handler +from .handler_status import HandlerStatus +from .handlers_list import HandlersList +from .post_events_handler_id_body import PostEventsHandlerIdBody +from .post_events_handler_id_response_200 import PostEventsHandlerIdResponse200 +from .post_events_handler_id_response_200_status import PostEventsHandlerIdResponse200Status +from .post_workflows_name_run_body import PostWorkflowsNameRunBody +from .post_workflows_name_run_body_context import PostWorkflowsNameRunBodyContext +from .post_workflows_name_run_body_kwargs import PostWorkflowsNameRunBodyKwargs +from .post_workflows_name_run_body_start_event import PostWorkflowsNameRunBodyStartEvent +from .post_workflows_name_run_nowait_body import PostWorkflowsNameRunNowaitBody +from 
.post_workflows_name_run_nowait_body_context import PostWorkflowsNameRunNowaitBodyContext +from .post_workflows_name_run_nowait_body_kwargs import PostWorkflowsNameRunNowaitBodyKwargs +from .post_workflows_name_run_nowait_body_start_event import PostWorkflowsNameRunNowaitBodyStartEvent + +__all__ = ( + "GetEventsHandlerIdResponse200", + "GetEventsHandlerIdResponse200Value", + "GetHealthResponse200", + "GetWorkflowsNameRepresentationResponse200", + "GetWorkflowsNameSchemaResponse200", + "GetWorkflowsResponse200", + "Handler", + "HandlersList", + "HandlerStatus", + "PostEventsHandlerIdBody", + "PostEventsHandlerIdResponse200", + "PostEventsHandlerIdResponse200Status", + "PostWorkflowsNameRunBody", + "PostWorkflowsNameRunBodyContext", + "PostWorkflowsNameRunBodyKwargs", + "PostWorkflowsNameRunBodyStartEvent", + "PostWorkflowsNameRunNowaitBody", + "PostWorkflowsNameRunNowaitBodyContext", + "PostWorkflowsNameRunNowaitBodyKwargs", + "PostWorkflowsNameRunNowaitBodyStartEvent", +) diff --git a/src/workflows/openapi_generated_client/workflows_api_client/models/get_events_handler_id_response_200.py b/src/workflows/openapi_generated_client/workflows_api_client/models/get_events_handler_id_response_200.py new file mode 100644 index 0000000..39d4775 --- /dev/null +++ b/src/workflows/openapi_generated_client/workflows_api_client/models/get_events_handler_id_response_200.py @@ -0,0 +1,74 @@ +from collections.abc import Mapping +from typing import TYPE_CHECKING, Any, TypeVar + +from attrs import define as _attrs_define +from attrs import field as _attrs_field + +if TYPE_CHECKING: + from ..models.get_events_handler_id_response_200_value import GetEventsHandlerIdResponse200Value + + +T = TypeVar("T", bound="GetEventsHandlerIdResponse200") + + +@_attrs_define +class GetEventsHandlerIdResponse200: + """Server-Sent Events stream of event data. + + Attributes: + value (GetEventsHandlerIdResponse200Value): The event value. + qualified_name (str): The qualified name of the event. 
+ """ + + value: "GetEventsHandlerIdResponse200Value" + qualified_name: str + additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) + + def to_dict(self) -> dict[str, Any]: + value = self.value.to_dict() + + qualified_name = self.qualified_name + + field_dict: dict[str, Any] = {} + field_dict.update(self.additional_properties) + field_dict.update( + { + "value": value, + "qualified_name": qualified_name, + } + ) + + return field_dict + + @classmethod + def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T: + from ..models.get_events_handler_id_response_200_value import GetEventsHandlerIdResponse200Value + + d = dict(src_dict) + value = GetEventsHandlerIdResponse200Value.from_dict(d.pop("value")) + + qualified_name = d.pop("qualified_name") + + get_events_handler_id_response_200 = cls( + value=value, + qualified_name=qualified_name, + ) + + get_events_handler_id_response_200.additional_properties = d + return get_events_handler_id_response_200 + + @property + def additional_keys(self) -> list[str]: + return list(self.additional_properties.keys()) + + def __getitem__(self, key: str) -> Any: + return self.additional_properties[key] + + def __setitem__(self, key: str, value: Any) -> None: + self.additional_properties[key] = value + + def __delitem__(self, key: str) -> None: + del self.additional_properties[key] + + def __contains__(self, key: str) -> bool: + return key in self.additional_properties diff --git a/src/workflows/openapi_generated_client/workflows_api_client/models/get_events_handler_id_response_200_value.py b/src/workflows/openapi_generated_client/workflows_api_client/models/get_events_handler_id_response_200_value.py new file mode 100644 index 0000000..942d949 --- /dev/null +++ b/src/workflows/openapi_generated_client/workflows_api_client/models/get_events_handler_id_response_200_value.py @@ -0,0 +1,44 @@ +from collections.abc import Mapping +from typing import Any, TypeVar + +from attrs import define as _attrs_define +from attrs import field as _attrs_field + +T = TypeVar("T", bound="GetEventsHandlerIdResponse200Value") + + +@_attrs_define +class GetEventsHandlerIdResponse200Value: + """The event value.""" + + additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) + + def to_dict(self) -> dict[str, Any]: + field_dict: dict[str, Any] = {} + field_dict.update(self.additional_properties) + + return field_dict + + @classmethod + def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T: + d = dict(src_dict) + get_events_handler_id_response_200_value = cls() + + get_events_handler_id_response_200_value.additional_properties = d + return get_events_handler_id_response_200_value + + @property + def additional_keys(self) -> list[str]: + return list(self.additional_properties.keys()) + + def __getitem__(self, key: str) -> Any: + return self.additional_properties[key] + + def __setitem__(self, key: str, value: Any) -> None: + self.additional_properties[key] = value + + def __delitem__(self, key: str) -> None: + del self.additional_properties[key] + + def __contains__(self, key: str) -> bool: + return key in self.additional_properties diff --git a/src/workflows/openapi_generated_client/workflows_api_client/models/get_health_response_200.py b/src/workflows/openapi_generated_client/workflows_api_client/models/get_health_response_200.py new file mode 100644 index 0000000..3d5af72 --- /dev/null +++ b/src/workflows/openapi_generated_client/workflows_api_client/models/get_health_response_200.py @@ -0,0 +1,59 @@ +from collections.abc 
import Mapping +from typing import Any, TypeVar + +from attrs import define as _attrs_define +from attrs import field as _attrs_field + +T = TypeVar("T", bound="GetHealthResponse200") + + +@_attrs_define +class GetHealthResponse200: + """ + Attributes: + status (str): Example: healthy. + """ + + status: str + additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) + + def to_dict(self) -> dict[str, Any]: + status = self.status + + field_dict: dict[str, Any] = {} + field_dict.update(self.additional_properties) + field_dict.update( + { + "status": status, + } + ) + + return field_dict + + @classmethod + def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T: + d = dict(src_dict) + status = d.pop("status") + + get_health_response_200 = cls( + status=status, + ) + + get_health_response_200.additional_properties = d + return get_health_response_200 + + @property + def additional_keys(self) -> list[str]: + return list(self.additional_properties.keys()) + + def __getitem__(self, key: str) -> Any: + return self.additional_properties[key] + + def __setitem__(self, key: str, value: Any) -> None: + self.additional_properties[key] = value + + def __delitem__(self, key: str) -> None: + del self.additional_properties[key] + + def __contains__(self, key: str) -> bool: + return key in self.additional_properties diff --git a/src/workflows/openapi_generated_client/workflows_api_client/models/get_workflows_name_representation_response_200.py b/src/workflows/openapi_generated_client/workflows_api_client/models/get_workflows_name_representation_response_200.py new file mode 100644 index 0000000..1276c1f --- /dev/null +++ b/src/workflows/openapi_generated_client/workflows_api_client/models/get_workflows_name_representation_response_200.py @@ -0,0 +1,59 @@ +from collections.abc import Mapping +from typing import Any, TypeVar + +from attrs import define as _attrs_define +from attrs import field as _attrs_field + +T = TypeVar("T", bound="GetWorkflowsNameRepresentationResponse200") + + +@_attrs_define +class GetWorkflowsNameRepresentationResponse200: + """ + Attributes: + graph (Any): the elements of the JSON representation of the workflow + """ + + graph: Any + additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) + + def to_dict(self) -> dict[str, Any]: + graph = self.graph + + field_dict: dict[str, Any] = {} + field_dict.update(self.additional_properties) + field_dict.update( + { + "graph": graph, + } + ) + + return field_dict + + @classmethod + def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T: + d = dict(src_dict) + graph = d.pop("graph") + + get_workflows_name_representation_response_200 = cls( + graph=graph, + ) + + get_workflows_name_representation_response_200.additional_properties = d + return get_workflows_name_representation_response_200 + + @property + def additional_keys(self) -> list[str]: + return list(self.additional_properties.keys()) + + def __getitem__(self, key: str) -> Any: + return self.additional_properties[key] + + def __setitem__(self, key: str, value: Any) -> None: + self.additional_properties[key] = value + + def __delitem__(self, key: str) -> None: + del self.additional_properties[key] + + def __contains__(self, key: str) -> bool: + return key in self.additional_properties diff --git a/src/workflows/openapi_generated_client/workflows_api_client/models/get_workflows_name_schema_response_200.py b/src/workflows/openapi_generated_client/workflows_api_client/models/get_workflows_name_schema_response_200.py new file mode 
100644 index 0000000..13c7d2c --- /dev/null +++ b/src/workflows/openapi_generated_client/workflows_api_client/models/get_workflows_name_schema_response_200.py @@ -0,0 +1,67 @@ +from collections.abc import Mapping +from typing import Any, TypeVar + +from attrs import define as _attrs_define +from attrs import field as _attrs_field + +T = TypeVar("T", bound="GetWorkflowsNameSchemaResponse200") + + +@_attrs_define +class GetWorkflowsNameSchemaResponse200: + """ + Attributes: + start (Any): JSON schema for the start event + stop (Any): JSON schema for the stop event + """ + + start: Any + stop: Any + additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) + + def to_dict(self) -> dict[str, Any]: + start = self.start + + stop = self.stop + + field_dict: dict[str, Any] = {} + field_dict.update(self.additional_properties) + field_dict.update( + { + "start": start, + "stop": stop, + } + ) + + return field_dict + + @classmethod + def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T: + d = dict(src_dict) + start = d.pop("start") + + stop = d.pop("stop") + + get_workflows_name_schema_response_200 = cls( + start=start, + stop=stop, + ) + + get_workflows_name_schema_response_200.additional_properties = d + return get_workflows_name_schema_response_200 + + @property + def additional_keys(self) -> list[str]: + return list(self.additional_properties.keys()) + + def __getitem__(self, key: str) -> Any: + return self.additional_properties[key] + + def __setitem__(self, key: str, value: Any) -> None: + self.additional_properties[key] = value + + def __delitem__(self, key: str) -> None: + del self.additional_properties[key] + + def __contains__(self, key: str) -> bool: + return key in self.additional_properties diff --git a/src/workflows/openapi_generated_client/workflows_api_client/models/get_workflows_response_200.py b/src/workflows/openapi_generated_client/workflows_api_client/models/get_workflows_response_200.py new file mode 100644 index 0000000..6062dfa --- /dev/null +++ b/src/workflows/openapi_generated_client/workflows_api_client/models/get_workflows_response_200.py @@ -0,0 +1,59 @@ +from collections.abc import Mapping +from typing import Any, TypeVar, cast + +from attrs import define as _attrs_define +from attrs import field as _attrs_field + +T = TypeVar("T", bound="GetWorkflowsResponse200") + + +@_attrs_define +class GetWorkflowsResponse200: + """ + Attributes: + workflows (list[str]): + """ + + workflows: list[str] + additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) + + def to_dict(self) -> dict[str, Any]: + workflows = self.workflows + + field_dict: dict[str, Any] = {} + field_dict.update(self.additional_properties) + field_dict.update( + { + "workflows": workflows, + } + ) + + return field_dict + + @classmethod + def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T: + d = dict(src_dict) + workflows = cast(list[str], d.pop("workflows")) + + get_workflows_response_200 = cls( + workflows=workflows, + ) + + get_workflows_response_200.additional_properties = d + return get_workflows_response_200 + + @property + def additional_keys(self) -> list[str]: + return list(self.additional_properties.keys()) + + def __getitem__(self, key: str) -> Any: + return self.additional_properties[key] + + def __setitem__(self, key: str, value: Any) -> None: + self.additional_properties[key] = value + + def __delitem__(self, key: str) -> None: + del self.additional_properties[key] + + def __contains__(self, key: str) -> bool: + return key in 
self.additional_properties diff --git a/src/workflows/openapi_generated_client/workflows_api_client/models/handler.py b/src/workflows/openapi_generated_client/workflows_api_client/models/handler.py new file mode 100644 index 0000000..499d0d1 --- /dev/null +++ b/src/workflows/openapi_generated_client/workflows_api_client/models/handler.py @@ -0,0 +1,197 @@ +import datetime +from collections.abc import Mapping +from typing import Any, TypeVar, Union, cast + +from attrs import define as _attrs_define +from attrs import field as _attrs_field +from dateutil.parser import isoparse + +from ..models.handler_status import HandlerStatus +from ..types import UNSET, Unset + +T = TypeVar("T", bound="Handler") + + +@_attrs_define +class Handler: + """ + Attributes: + handler_id (str): + workflow_name (str): + status (HandlerStatus): + started_at (datetime.datetime): + run_id (Union[None, Unset, str]): + updated_at (Union[None, Unset, datetime.datetime]): + completed_at (Union[None, Unset, datetime.datetime]): + error (Union[None, Unset, str]): + result (Union[Unset, Any]): Workflow result value + """ + + handler_id: str + workflow_name: str + status: HandlerStatus + started_at: datetime.datetime + run_id: Union[None, Unset, str] = UNSET + updated_at: Union[None, Unset, datetime.datetime] = UNSET + completed_at: Union[None, Unset, datetime.datetime] = UNSET + error: Union[None, Unset, str] = UNSET + result: Union[Unset, Any] = UNSET + additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) + + def to_dict(self) -> dict[str, Any]: + handler_id = self.handler_id + + workflow_name = self.workflow_name + + status = self.status.value + + started_at = self.started_at.isoformat() + + run_id: Union[None, Unset, str] + if isinstance(self.run_id, Unset): + run_id = UNSET + else: + run_id = self.run_id + + updated_at: Union[None, Unset, str] + if isinstance(self.updated_at, Unset): + updated_at = UNSET + elif isinstance(self.updated_at, datetime.datetime): + updated_at = self.updated_at.isoformat() + else: + updated_at = self.updated_at + + completed_at: Union[None, Unset, str] + if isinstance(self.completed_at, Unset): + completed_at = UNSET + elif isinstance(self.completed_at, datetime.datetime): + completed_at = self.completed_at.isoformat() + else: + completed_at = self.completed_at + + error: Union[None, Unset, str] + if isinstance(self.error, Unset): + error = UNSET + else: + error = self.error + + result = self.result + + field_dict: dict[str, Any] = {} + field_dict.update(self.additional_properties) + field_dict.update( + { + "handler_id": handler_id, + "workflow_name": workflow_name, + "status": status, + "started_at": started_at, + } + ) + if run_id is not UNSET: + field_dict["run_id"] = run_id + if updated_at is not UNSET: + field_dict["updated_at"] = updated_at + if completed_at is not UNSET: + field_dict["completed_at"] = completed_at + if error is not UNSET: + field_dict["error"] = error + if result is not UNSET: + field_dict["result"] = result + + return field_dict + + @classmethod + def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T: + d = dict(src_dict) + handler_id = d.pop("handler_id") + + workflow_name = d.pop("workflow_name") + + status = HandlerStatus(d.pop("status")) + + started_at = isoparse(d.pop("started_at")) + + def _parse_run_id(data: object) -> Union[None, Unset, str]: + if data is None: + return data + if isinstance(data, Unset): + return data + return cast(Union[None, Unset, str], data) + + run_id = _parse_run_id(d.pop("run_id", UNSET)) + + def 
_parse_updated_at(data: object) -> Union[None, Unset, datetime.datetime]: + if data is None: + return data + if isinstance(data, Unset): + return data + try: + if not isinstance(data, str): + raise TypeError() + updated_at_type_0 = isoparse(data) + + return updated_at_type_0 + except: # noqa: E722 + pass + return cast(Union[None, Unset, datetime.datetime], data) + + updated_at = _parse_updated_at(d.pop("updated_at", UNSET)) + + def _parse_completed_at(data: object) -> Union[None, Unset, datetime.datetime]: + if data is None: + return data + if isinstance(data, Unset): + return data + try: + if not isinstance(data, str): + raise TypeError() + completed_at_type_0 = isoparse(data) + + return completed_at_type_0 + except: # noqa: E722 + pass + return cast(Union[None, Unset, datetime.datetime], data) + + completed_at = _parse_completed_at(d.pop("completed_at", UNSET)) + + def _parse_error(data: object) -> Union[None, Unset, str]: + if data is None: + return data + if isinstance(data, Unset): + return data + return cast(Union[None, Unset, str], data) + + error = _parse_error(d.pop("error", UNSET)) + + result = d.pop("result", UNSET) + + handler = cls( + handler_id=handler_id, + workflow_name=workflow_name, + status=status, + started_at=started_at, + run_id=run_id, + updated_at=updated_at, + completed_at=completed_at, + error=error, + result=result, + ) + + handler.additional_properties = d + return handler + + @property + def additional_keys(self) -> list[str]: + return list(self.additional_properties.keys()) + + def __getitem__(self, key: str) -> Any: + return self.additional_properties[key] + + def __setitem__(self, key: str, value: Any) -> None: + self.additional_properties[key] = value + + def __delitem__(self, key: str) -> None: + del self.additional_properties[key] + + def __contains__(self, key: str) -> bool: + return key in self.additional_properties diff --git a/src/workflows/openapi_generated_client/workflows_api_client/models/handler_status.py b/src/workflows/openapi_generated_client/workflows_api_client/models/handler_status.py new file mode 100644 index 0000000..2473289 --- /dev/null +++ b/src/workflows/openapi_generated_client/workflows_api_client/models/handler_status.py @@ -0,0 +1,10 @@ +from enum import Enum + + +class HandlerStatus(str, Enum): + COMPLETED = "completed" + FAILED = "failed" + RUNNING = "running" + + def __str__(self) -> str: + return str(self.value) diff --git a/src/workflows/openapi_generated_client/workflows_api_client/models/handlers_list.py b/src/workflows/openapi_generated_client/workflows_api_client/models/handlers_list.py new file mode 100644 index 0000000..ed0cc69 --- /dev/null +++ b/src/workflows/openapi_generated_client/workflows_api_client/models/handlers_list.py @@ -0,0 +1,73 @@ +from collections.abc import Mapping +from typing import TYPE_CHECKING, Any, TypeVar + +from attrs import define as _attrs_define +from attrs import field as _attrs_field + +if TYPE_CHECKING: + from ..models.handler import Handler + + +T = TypeVar("T", bound="HandlersList") + + +@_attrs_define +class HandlersList: + """ + Attributes: + handlers (list['Handler']): + """ + + handlers: list["Handler"] + additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) + + def to_dict(self) -> dict[str, Any]: + handlers = [] + for handlers_item_data in self.handlers: + handlers_item = handlers_item_data.to_dict() + handlers.append(handlers_item) + + field_dict: dict[str, Any] = {} + field_dict.update(self.additional_properties) + field_dict.update( + { + "handlers": 
handlers, + } + ) + + return field_dict + + @classmethod + def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T: + from ..models.handler import Handler + + d = dict(src_dict) + handlers = [] + _handlers = d.pop("handlers") + for handlers_item_data in _handlers: + handlers_item = Handler.from_dict(handlers_item_data) + + handlers.append(handlers_item) + + handlers_list = cls( + handlers=handlers, + ) + + handlers_list.additional_properties = d + return handlers_list + + @property + def additional_keys(self) -> list[str]: + return list(self.additional_properties.keys()) + + def __getitem__(self, key: str) -> Any: + return self.additional_properties[key] + + def __setitem__(self, key: str, value: Any) -> None: + self.additional_properties[key] = value + + def __delitem__(self, key: str) -> None: + del self.additional_properties[key] + + def __contains__(self, key: str) -> bool: + return key in self.additional_properties diff --git a/src/workflows/openapi_generated_client/workflows_api_client/models/post_events_handler_id_body.py b/src/workflows/openapi_generated_client/workflows_api_client/models/post_events_handler_id_body.py new file mode 100644 index 0000000..87c9e8c --- /dev/null +++ b/src/workflows/openapi_generated_client/workflows_api_client/models/post_events_handler_id_body.py @@ -0,0 +1,70 @@ +from collections.abc import Mapping +from typing import Any, TypeVar, Union + +from attrs import define as _attrs_define +from attrs import field as _attrs_field + +from ..types import UNSET, Unset + +T = TypeVar("T", bound="PostEventsHandlerIdBody") + + +@_attrs_define +class PostEventsHandlerIdBody: + """ + Attributes: + event (str): Serialized event in JSON format. + step (Union[Unset, str]): Optional target step name. If not provided, event is sent to all steps. 
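+
+    Example (illustrative sketch only; the event payload and target step name
+    below are assumptions, not values defined by this API):
+        PostEventsHandlerIdBody(
+            event='{"approved": true}',
+            step="second_step",
+        )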
+ """ + + event: str + step: Union[Unset, str] = UNSET + additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) + + def to_dict(self) -> dict[str, Any]: + event = self.event + + step = self.step + + field_dict: dict[str, Any] = {} + field_dict.update(self.additional_properties) + field_dict.update( + { + "event": event, + } + ) + if step is not UNSET: + field_dict["step"] = step + + return field_dict + + @classmethod + def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T: + d = dict(src_dict) + event = d.pop("event") + + step = d.pop("step", UNSET) + + post_events_handler_id_body = cls( + event=event, + step=step, + ) + + post_events_handler_id_body.additional_properties = d + return post_events_handler_id_body + + @property + def additional_keys(self) -> list[str]: + return list(self.additional_properties.keys()) + + def __getitem__(self, key: str) -> Any: + return self.additional_properties[key] + + def __setitem__(self, key: str, value: Any) -> None: + self.additional_properties[key] = value + + def __delitem__(self, key: str) -> None: + del self.additional_properties[key] + + def __contains__(self, key: str) -> bool: + return key in self.additional_properties diff --git a/src/workflows/openapi_generated_client/workflows_api_client/models/post_events_handler_id_response_200.py b/src/workflows/openapi_generated_client/workflows_api_client/models/post_events_handler_id_response_200.py new file mode 100644 index 0000000..f8406b0 --- /dev/null +++ b/src/workflows/openapi_generated_client/workflows_api_client/models/post_events_handler_id_response_200.py @@ -0,0 +1,61 @@ +from collections.abc import Mapping +from typing import Any, TypeVar + +from attrs import define as _attrs_define +from attrs import field as _attrs_field + +from ..models.post_events_handler_id_response_200_status import PostEventsHandlerIdResponse200Status + +T = TypeVar("T", bound="PostEventsHandlerIdResponse200") + + +@_attrs_define +class PostEventsHandlerIdResponse200: + """ + Attributes: + status (PostEventsHandlerIdResponse200Status): + """ + + status: PostEventsHandlerIdResponse200Status + additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) + + def to_dict(self) -> dict[str, Any]: + status = self.status.value + + field_dict: dict[str, Any] = {} + field_dict.update(self.additional_properties) + field_dict.update( + { + "status": status, + } + ) + + return field_dict + + @classmethod + def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T: + d = dict(src_dict) + status = PostEventsHandlerIdResponse200Status(d.pop("status")) + + post_events_handler_id_response_200 = cls( + status=status, + ) + + post_events_handler_id_response_200.additional_properties = d + return post_events_handler_id_response_200 + + @property + def additional_keys(self) -> list[str]: + return list(self.additional_properties.keys()) + + def __getitem__(self, key: str) -> Any: + return self.additional_properties[key] + + def __setitem__(self, key: str, value: Any) -> None: + self.additional_properties[key] = value + + def __delitem__(self, key: str) -> None: + del self.additional_properties[key] + + def __contains__(self, key: str) -> bool: + return key in self.additional_properties diff --git a/src/workflows/openapi_generated_client/workflows_api_client/models/post_events_handler_id_response_200_status.py b/src/workflows/openapi_generated_client/workflows_api_client/models/post_events_handler_id_response_200_status.py new file mode 100644 index 0000000..75857f2 --- /dev/null +++ 
b/src/workflows/openapi_generated_client/workflows_api_client/models/post_events_handler_id_response_200_status.py @@ -0,0 +1,8 @@ +from enum import Enum + + +class PostEventsHandlerIdResponse200Status(str, Enum): + SENT = "sent" + + def __str__(self) -> str: + return str(self.value) diff --git a/src/workflows/openapi_generated_client/workflows_api_client/models/post_workflows_name_run_body.py b/src/workflows/openapi_generated_client/workflows_api_client/models/post_workflows_name_run_body.py new file mode 100644 index 0000000..92fab2c --- /dev/null +++ b/src/workflows/openapi_generated_client/workflows_api_client/models/post_workflows_name_run_body.py @@ -0,0 +1,109 @@ +from collections.abc import Mapping +from typing import TYPE_CHECKING, Any, TypeVar, Union + +from attrs import define as _attrs_define +from attrs import field as _attrs_field + +from ..types import UNSET, Unset + +if TYPE_CHECKING: + from ..models.post_workflows_name_run_body_context import PostWorkflowsNameRunBodyContext + from ..models.post_workflows_name_run_body_kwargs import PostWorkflowsNameRunBodyKwargs + from ..models.post_workflows_name_run_body_start_event import PostWorkflowsNameRunBodyStartEvent + + +T = TypeVar("T", bound="PostWorkflowsNameRunBody") + + +@_attrs_define +class PostWorkflowsNameRunBody: + """ + Attributes: + start_event (Union[Unset, PostWorkflowsNameRunBodyStartEvent]): Plain JSON object representing the start event + (e.g., {"message": "..."}). + context (Union[Unset, PostWorkflowsNameRunBodyContext]): Serialized workflow Context. + kwargs (Union[Unset, PostWorkflowsNameRunBodyKwargs]): Additional keyword arguments for the workflow. + """ + + start_event: Union[Unset, "PostWorkflowsNameRunBodyStartEvent"] = UNSET + context: Union[Unset, "PostWorkflowsNameRunBodyContext"] = UNSET + kwargs: Union[Unset, "PostWorkflowsNameRunBodyKwargs"] = UNSET + additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) + + def to_dict(self) -> dict[str, Any]: + start_event: Union[Unset, dict[str, Any]] = UNSET + if not isinstance(self.start_event, Unset): + start_event = self.start_event.to_dict() + + context: Union[Unset, dict[str, Any]] = UNSET + if not isinstance(self.context, Unset): + context = self.context.to_dict() + + kwargs: Union[Unset, dict[str, Any]] = UNSET + if not isinstance(self.kwargs, Unset): + kwargs = self.kwargs.to_dict() + + field_dict: dict[str, Any] = {} + field_dict.update(self.additional_properties) + field_dict.update({}) + if start_event is not UNSET: + field_dict["start_event"] = start_event + if context is not UNSET: + field_dict["context"] = context + if kwargs is not UNSET: + field_dict["kwargs"] = kwargs + + return field_dict + + @classmethod + def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T: + from ..models.post_workflows_name_run_body_context import PostWorkflowsNameRunBodyContext + from ..models.post_workflows_name_run_body_kwargs import PostWorkflowsNameRunBodyKwargs + from ..models.post_workflows_name_run_body_start_event import PostWorkflowsNameRunBodyStartEvent + + d = dict(src_dict) + _start_event = d.pop("start_event", UNSET) + start_event: Union[Unset, PostWorkflowsNameRunBodyStartEvent] + if isinstance(_start_event, Unset): + start_event = UNSET + else: + start_event = PostWorkflowsNameRunBodyStartEvent.from_dict(_start_event) + + _context = d.pop("context", UNSET) + context: Union[Unset, PostWorkflowsNameRunBodyContext] + if isinstance(_context, Unset): + context = UNSET + else: + context = 
PostWorkflowsNameRunBodyContext.from_dict(_context) + + _kwargs = d.pop("kwargs", UNSET) + kwargs: Union[Unset, PostWorkflowsNameRunBodyKwargs] + if isinstance(_kwargs, Unset): + kwargs = UNSET + else: + kwargs = PostWorkflowsNameRunBodyKwargs.from_dict(_kwargs) + + post_workflows_name_run_body = cls( + start_event=start_event, + context=context, + kwargs=kwargs, + ) + + post_workflows_name_run_body.additional_properties = d + return post_workflows_name_run_body + + @property + def additional_keys(self) -> list[str]: + return list(self.additional_properties.keys()) + + def __getitem__(self, key: str) -> Any: + return self.additional_properties[key] + + def __setitem__(self, key: str, value: Any) -> None: + self.additional_properties[key] = value + + def __delitem__(self, key: str) -> None: + del self.additional_properties[key] + + def __contains__(self, key: str) -> bool: + return key in self.additional_properties diff --git a/src/workflows/openapi_generated_client/workflows_api_client/models/post_workflows_name_run_body_context.py b/src/workflows/openapi_generated_client/workflows_api_client/models/post_workflows_name_run_body_context.py new file mode 100644 index 0000000..36380d5 --- /dev/null +++ b/src/workflows/openapi_generated_client/workflows_api_client/models/post_workflows_name_run_body_context.py @@ -0,0 +1,44 @@ +from collections.abc import Mapping +from typing import Any, TypeVar + +from attrs import define as _attrs_define +from attrs import field as _attrs_field + +T = TypeVar("T", bound="PostWorkflowsNameRunBodyContext") + + +@_attrs_define +class PostWorkflowsNameRunBodyContext: + """Serialized workflow Context.""" + + additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) + + def to_dict(self) -> dict[str, Any]: + field_dict: dict[str, Any] = {} + field_dict.update(self.additional_properties) + + return field_dict + + @classmethod + def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T: + d = dict(src_dict) + post_workflows_name_run_body_context = cls() + + post_workflows_name_run_body_context.additional_properties = d + return post_workflows_name_run_body_context + + @property + def additional_keys(self) -> list[str]: + return list(self.additional_properties.keys()) + + def __getitem__(self, key: str) -> Any: + return self.additional_properties[key] + + def __setitem__(self, key: str, value: Any) -> None: + self.additional_properties[key] = value + + def __delitem__(self, key: str) -> None: + del self.additional_properties[key] + + def __contains__(self, key: str) -> bool: + return key in self.additional_properties diff --git a/src/workflows/openapi_generated_client/workflows_api_client/models/post_workflows_name_run_body_kwargs.py b/src/workflows/openapi_generated_client/workflows_api_client/models/post_workflows_name_run_body_kwargs.py new file mode 100644 index 0000000..f39b723 --- /dev/null +++ b/src/workflows/openapi_generated_client/workflows_api_client/models/post_workflows_name_run_body_kwargs.py @@ -0,0 +1,44 @@ +from collections.abc import Mapping +from typing import Any, TypeVar + +from attrs import define as _attrs_define +from attrs import field as _attrs_field + +T = TypeVar("T", bound="PostWorkflowsNameRunBodyKwargs") + + +@_attrs_define +class PostWorkflowsNameRunBodyKwargs: + """Additional keyword arguments for the workflow.""" + + additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) + + def to_dict(self) -> dict[str, Any]: + field_dict: dict[str, Any] = {} + 
field_dict.update(self.additional_properties) + + return field_dict + + @classmethod + def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T: + d = dict(src_dict) + post_workflows_name_run_body_kwargs = cls() + + post_workflows_name_run_body_kwargs.additional_properties = d + return post_workflows_name_run_body_kwargs + + @property + def additional_keys(self) -> list[str]: + return list(self.additional_properties.keys()) + + def __getitem__(self, key: str) -> Any: + return self.additional_properties[key] + + def __setitem__(self, key: str, value: Any) -> None: + self.additional_properties[key] = value + + def __delitem__(self, key: str) -> None: + del self.additional_properties[key] + + def __contains__(self, key: str) -> bool: + return key in self.additional_properties diff --git a/src/workflows/openapi_generated_client/workflows_api_client/models/post_workflows_name_run_body_start_event.py b/src/workflows/openapi_generated_client/workflows_api_client/models/post_workflows_name_run_body_start_event.py new file mode 100644 index 0000000..4ccd9ca --- /dev/null +++ b/src/workflows/openapi_generated_client/workflows_api_client/models/post_workflows_name_run_body_start_event.py @@ -0,0 +1,44 @@ +from collections.abc import Mapping +from typing import Any, TypeVar + +from attrs import define as _attrs_define +from attrs import field as _attrs_field + +T = TypeVar("T", bound="PostWorkflowsNameRunBodyStartEvent") + + +@_attrs_define +class PostWorkflowsNameRunBodyStartEvent: + """Plain JSON object representing the start event (e.g., {"message": "..."}).""" + + additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) + + def to_dict(self) -> dict[str, Any]: + field_dict: dict[str, Any] = {} + field_dict.update(self.additional_properties) + + return field_dict + + @classmethod + def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T: + d = dict(src_dict) + post_workflows_name_run_body_start_event = cls() + + post_workflows_name_run_body_start_event.additional_properties = d + return post_workflows_name_run_body_start_event + + @property + def additional_keys(self) -> list[str]: + return list(self.additional_properties.keys()) + + def __getitem__(self, key: str) -> Any: + return self.additional_properties[key] + + def __setitem__(self, key: str, value: Any) -> None: + self.additional_properties[key] = value + + def __delitem__(self, key: str) -> None: + del self.additional_properties[key] + + def __contains__(self, key: str) -> bool: + return key in self.additional_properties diff --git a/src/workflows/openapi_generated_client/workflows_api_client/models/post_workflows_name_run_nowait_body.py b/src/workflows/openapi_generated_client/workflows_api_client/models/post_workflows_name_run_nowait_body.py new file mode 100644 index 0000000..8d06954 --- /dev/null +++ b/src/workflows/openapi_generated_client/workflows_api_client/models/post_workflows_name_run_nowait_body.py @@ -0,0 +1,109 @@ +from collections.abc import Mapping +from typing import TYPE_CHECKING, Any, TypeVar, Union + +from attrs import define as _attrs_define +from attrs import field as _attrs_field + +from ..types import UNSET, Unset + +if TYPE_CHECKING: + from ..models.post_workflows_name_run_nowait_body_context import PostWorkflowsNameRunNowaitBodyContext + from ..models.post_workflows_name_run_nowait_body_kwargs import PostWorkflowsNameRunNowaitBodyKwargs + from ..models.post_workflows_name_run_nowait_body_start_event import PostWorkflowsNameRunNowaitBodyStartEvent + + +T = TypeVar("T", 
bound="PostWorkflowsNameRunNowaitBody") + + +@_attrs_define +class PostWorkflowsNameRunNowaitBody: + """ + Attributes: + start_event (Union[Unset, PostWorkflowsNameRunNowaitBodyStartEvent]): Plain JSON object representing the start + event (e.g., {"message": "..."}). + context (Union[Unset, PostWorkflowsNameRunNowaitBodyContext]): Serialized workflow Context. + kwargs (Union[Unset, PostWorkflowsNameRunNowaitBodyKwargs]): Additional keyword arguments for the workflow. + """ + + start_event: Union[Unset, "PostWorkflowsNameRunNowaitBodyStartEvent"] = UNSET + context: Union[Unset, "PostWorkflowsNameRunNowaitBodyContext"] = UNSET + kwargs: Union[Unset, "PostWorkflowsNameRunNowaitBodyKwargs"] = UNSET + additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) + + def to_dict(self) -> dict[str, Any]: + start_event: Union[Unset, dict[str, Any]] = UNSET + if not isinstance(self.start_event, Unset): + start_event = self.start_event.to_dict() + + context: Union[Unset, dict[str, Any]] = UNSET + if not isinstance(self.context, Unset): + context = self.context.to_dict() + + kwargs: Union[Unset, dict[str, Any]] = UNSET + if not isinstance(self.kwargs, Unset): + kwargs = self.kwargs.to_dict() + + field_dict: dict[str, Any] = {} + field_dict.update(self.additional_properties) + field_dict.update({}) + if start_event is not UNSET: + field_dict["start_event"] = start_event + if context is not UNSET: + field_dict["context"] = context + if kwargs is not UNSET: + field_dict["kwargs"] = kwargs + + return field_dict + + @classmethod + def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T: + from ..models.post_workflows_name_run_nowait_body_context import PostWorkflowsNameRunNowaitBodyContext + from ..models.post_workflows_name_run_nowait_body_kwargs import PostWorkflowsNameRunNowaitBodyKwargs + from ..models.post_workflows_name_run_nowait_body_start_event import PostWorkflowsNameRunNowaitBodyStartEvent + + d = dict(src_dict) + _start_event = d.pop("start_event", UNSET) + start_event: Union[Unset, PostWorkflowsNameRunNowaitBodyStartEvent] + if isinstance(_start_event, Unset): + start_event = UNSET + else: + start_event = PostWorkflowsNameRunNowaitBodyStartEvent.from_dict(_start_event) + + _context = d.pop("context", UNSET) + context: Union[Unset, PostWorkflowsNameRunNowaitBodyContext] + if isinstance(_context, Unset): + context = UNSET + else: + context = PostWorkflowsNameRunNowaitBodyContext.from_dict(_context) + + _kwargs = d.pop("kwargs", UNSET) + kwargs: Union[Unset, PostWorkflowsNameRunNowaitBodyKwargs] + if isinstance(_kwargs, Unset): + kwargs = UNSET + else: + kwargs = PostWorkflowsNameRunNowaitBodyKwargs.from_dict(_kwargs) + + post_workflows_name_run_nowait_body = cls( + start_event=start_event, + context=context, + kwargs=kwargs, + ) + + post_workflows_name_run_nowait_body.additional_properties = d + return post_workflows_name_run_nowait_body + + @property + def additional_keys(self) -> list[str]: + return list(self.additional_properties.keys()) + + def __getitem__(self, key: str) -> Any: + return self.additional_properties[key] + + def __setitem__(self, key: str, value: Any) -> None: + self.additional_properties[key] = value + + def __delitem__(self, key: str) -> None: + del self.additional_properties[key] + + def __contains__(self, key: str) -> bool: + return key in self.additional_properties diff --git a/src/workflows/openapi_generated_client/workflows_api_client/models/post_workflows_name_run_nowait_body_context.py 
b/src/workflows/openapi_generated_client/workflows_api_client/models/post_workflows_name_run_nowait_body_context.py new file mode 100644 index 0000000..b483d67 --- /dev/null +++ b/src/workflows/openapi_generated_client/workflows_api_client/models/post_workflows_name_run_nowait_body_context.py @@ -0,0 +1,44 @@ +from collections.abc import Mapping +from typing import Any, TypeVar + +from attrs import define as _attrs_define +from attrs import field as _attrs_field + +T = TypeVar("T", bound="PostWorkflowsNameRunNowaitBodyContext") + + +@_attrs_define +class PostWorkflowsNameRunNowaitBodyContext: + """Serialized workflow Context.""" + + additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) + + def to_dict(self) -> dict[str, Any]: + field_dict: dict[str, Any] = {} + field_dict.update(self.additional_properties) + + return field_dict + + @classmethod + def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T: + d = dict(src_dict) + post_workflows_name_run_nowait_body_context = cls() + + post_workflows_name_run_nowait_body_context.additional_properties = d + return post_workflows_name_run_nowait_body_context + + @property + def additional_keys(self) -> list[str]: + return list(self.additional_properties.keys()) + + def __getitem__(self, key: str) -> Any: + return self.additional_properties[key] + + def __setitem__(self, key: str, value: Any) -> None: + self.additional_properties[key] = value + + def __delitem__(self, key: str) -> None: + del self.additional_properties[key] + + def __contains__(self, key: str) -> bool: + return key in self.additional_properties diff --git a/src/workflows/openapi_generated_client/workflows_api_client/models/post_workflows_name_run_nowait_body_kwargs.py b/src/workflows/openapi_generated_client/workflows_api_client/models/post_workflows_name_run_nowait_body_kwargs.py new file mode 100644 index 0000000..1754fd9 --- /dev/null +++ b/src/workflows/openapi_generated_client/workflows_api_client/models/post_workflows_name_run_nowait_body_kwargs.py @@ -0,0 +1,44 @@ +from collections.abc import Mapping +from typing import Any, TypeVar + +from attrs import define as _attrs_define +from attrs import field as _attrs_field + +T = TypeVar("T", bound="PostWorkflowsNameRunNowaitBodyKwargs") + + +@_attrs_define +class PostWorkflowsNameRunNowaitBodyKwargs: + """Additional keyword arguments for the workflow.""" + + additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) + + def to_dict(self) -> dict[str, Any]: + field_dict: dict[str, Any] = {} + field_dict.update(self.additional_properties) + + return field_dict + + @classmethod + def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T: + d = dict(src_dict) + post_workflows_name_run_nowait_body_kwargs = cls() + + post_workflows_name_run_nowait_body_kwargs.additional_properties = d + return post_workflows_name_run_nowait_body_kwargs + + @property + def additional_keys(self) -> list[str]: + return list(self.additional_properties.keys()) + + def __getitem__(self, key: str) -> Any: + return self.additional_properties[key] + + def __setitem__(self, key: str, value: Any) -> None: + self.additional_properties[key] = value + + def __delitem__(self, key: str) -> None: + del self.additional_properties[key] + + def __contains__(self, key: str) -> bool: + return key in self.additional_properties diff --git a/src/workflows/openapi_generated_client/workflows_api_client/models/post_workflows_name_run_nowait_body_start_event.py 
b/src/workflows/openapi_generated_client/workflows_api_client/models/post_workflows_name_run_nowait_body_start_event.py new file mode 100644 index 0000000..e0ab722 --- /dev/null +++ b/src/workflows/openapi_generated_client/workflows_api_client/models/post_workflows_name_run_nowait_body_start_event.py @@ -0,0 +1,44 @@ +from collections.abc import Mapping +from typing import Any, TypeVar + +from attrs import define as _attrs_define +from attrs import field as _attrs_field + +T = TypeVar("T", bound="PostWorkflowsNameRunNowaitBodyStartEvent") + + +@_attrs_define +class PostWorkflowsNameRunNowaitBodyStartEvent: + """Plain JSON object representing the start event (e.g., {"message": "..."}).""" + + additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) + + def to_dict(self) -> dict[str, Any]: + field_dict: dict[str, Any] = {} + field_dict.update(self.additional_properties) + + return field_dict + + @classmethod + def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T: + d = dict(src_dict) + post_workflows_name_run_nowait_body_start_event = cls() + + post_workflows_name_run_nowait_body_start_event.additional_properties = d + return post_workflows_name_run_nowait_body_start_event + + @property + def additional_keys(self) -> list[str]: + return list(self.additional_properties.keys()) + + def __getitem__(self, key: str) -> Any: + return self.additional_properties[key] + + def __setitem__(self, key: str, value: Any) -> None: + self.additional_properties[key] = value + + def __delitem__(self, key: str) -> None: + del self.additional_properties[key] + + def __contains__(self, key: str) -> bool: + return key in self.additional_properties diff --git a/src/workflows/openapi_generated_client/workflows_api_client/py.typed b/src/workflows/openapi_generated_client/workflows_api_client/py.typed new file mode 100644 index 0000000..7632ecf --- /dev/null +++ b/src/workflows/openapi_generated_client/workflows_api_client/py.typed @@ -0,0 +1 @@ +# Marker file for PEP 561 diff --git a/src/workflows/openapi_generated_client/workflows_api_client/types.py b/src/workflows/openapi_generated_client/workflows_api_client/types.py new file mode 100644 index 0000000..1b96ca4 --- /dev/null +++ b/src/workflows/openapi_generated_client/workflows_api_client/types.py @@ -0,0 +1,54 @@ +"""Contains some shared types for properties""" + +from collections.abc import Mapping, MutableMapping +from http import HTTPStatus +from typing import IO, BinaryIO, Generic, Literal, Optional, TypeVar, Union + +from attrs import define + + +class Unset: + def __bool__(self) -> Literal[False]: + return False + + +UNSET: Unset = Unset() + +# The types that `httpx.Client(files=)` can accept, copied from that library. 
+FileContent = Union[IO[bytes], bytes, str] +FileTypes = Union[ + # (filename, file (or bytes), content_type) + tuple[Optional[str], FileContent, Optional[str]], + # (filename, file (or bytes), content_type, headers) + tuple[Optional[str], FileContent, Optional[str], Mapping[str, str]], +] +RequestFiles = list[tuple[str, FileTypes]] + + +@define +class File: + """Contains information for file uploads""" + + payload: BinaryIO + file_name: Optional[str] = None + mime_type: Optional[str] = None + + def to_tuple(self) -> FileTypes: + """Return a tuple representation that httpx will accept for multipart/form-data""" + return self.file_name, self.payload, self.mime_type + + +T = TypeVar("T") + + +@define +class Response(Generic[T]): + """A response from an endpoint""" + + status_code: HTTPStatus + content: bytes + headers: MutableMapping[str, str] + parsed: Optional[T] + + +__all__ = ["UNSET", "File", "FileTypes", "RequestFiles", "Response", "Unset"] diff --git a/src/workflows/server/server.py b/src/workflows/server/server.py index 997a7bc..15f8e4d 100644 --- a/src/workflows/server/server.py +++ b/src/workflows/server/server.py @@ -404,8 +404,6 @@ async def _get_events_schema(self, request: Request) -> JSONResponse: schema: type: string description: Registered workflow name. - requestBody: - required: false responses: 200: description: JSON schema successfully retrieved for start event @@ -455,8 +453,6 @@ async def _get_workflow_representation(self, request: Request) -> JSONResponse: schema: type: string description: Registered workflow name. - requestBody: - required: false responses: 200: description: JSON representation successfully retrieved From 8bb81c88b2e1524cc29b47f7b12eb2e11b8d5db0 Mon Sep 17 00:00:00 2001 From: "Clelia (Astra) Bertelli" Date: Thu, 25 Sep 2025 16:53:04 +0200 Subject: [PATCH 03/13] chore: implement events streaming the good ol way --- examples/client/client.py | 6 ++-- examples/client/server.py | 5 ++-- src/workflows/client/client.py | 52 ++++++++++++++++++++++++---------- src/workflows/server/server.py | 7 ++--- 4 files changed, 46 insertions(+), 24 deletions(-) diff --git a/examples/client/client.py b/examples/client/client.py index 1c91abd..9614239 100644 --- a/examples/client/client.py +++ b/examples/client/client.py @@ -32,9 +32,9 @@ async def main() -> None: ) print("==== STARTING THE WORKFLOW ===") print(f"Workflow running with handler: {handler}") - # print("=== STREAMING EVENTS ===") - # events = await client.get_workflow_events(handler) - # print(events) + print("=== STREAMING EVENTS ===") + async for event in client.get_workflow_events(handler): + print("Received data:", event) # Poll for result result = handler.status.value while result == "running": diff --git a/examples/client/server.py b/examples/client/server.py index dbb578b..df4d24e 100644 --- a/examples/client/server.py +++ b/examples/client/server.py @@ -1,5 +1,5 @@ from workflows import Workflow, step, Context -from workflows.events import StartEvent, StopEvent, InputRequiredEvent +from workflows.events import StartEvent, StopEvent, Event from pydantic import Field from workflows.server import WorkflowServer @@ -12,7 +12,7 @@ class InputNumbers(StartEvent): operation: Literal["sum", "subtraction"] = Field(default="sum") -class CalculationEvent(InputRequiredEvent): +class CalculationEvent(Event): result: int @@ -46,6 +46,7 @@ async def second_step(self, ev: CalculationEvent, ctx: Context) -> OutputEvent: async def main() -> None: server = WorkflowServer() server.add_workflow("add_or_subtract", 
AddOrSubtractWorkflow(timeout=1000)) + server.add_workflow("add_or_subtract_2", AddOrSubtractWorkflow(timeout=1000)) try: await server.serve("localhost", 8000) except KeyboardInterrupt: diff --git a/src/workflows/client/client.py b/src/workflows/client/client.py index 5486283..f90bc3b 100644 --- a/src/workflows/client/client.py +++ b/src/workflows/client/client.py @@ -1,6 +1,7 @@ # TOP-LEVEL import time import json +import httpx # GENERATED CLASSES (CLIENTS) from workflows.openapi_generated_client.workflows_api_client import ( @@ -21,9 +22,6 @@ from workflows.openapi_generated_client.workflows_api_client.api.default.get_results_handler_id import ( asyncio as get_results_handler_id, ) -from workflows.openapi_generated_client.workflows_api_client.api.default.get_events_handler_id import ( - asyncio as get_events_handler_id, -) from workflows.openapi_generated_client.workflows_api_client.api.default.post_workflows_name_run import ( asyncio as post_workflows_name_run, ) @@ -59,9 +57,6 @@ from workflows.openapi_generated_client.workflows_api_client.models.post_workflows_name_run_nowait_body_start_event import ( PostWorkflowsNameRunNowaitBodyStartEvent, ) -from workflows.openapi_generated_client.workflows_api_client.models.get_events_handler_id_response_200 import ( - GetEventsHandlerIdResponse200, -) from workflows.openapi_generated_client.workflows_api_client.models.post_events_handler_id_body import ( PostEventsHandlerIdBody, ) @@ -81,7 +76,7 @@ from workflows.events import StartEvent, Event from workflows.context.serializers import JsonSerializer from .utils import AuthDetails, EventDict -from typing import Literal, Optional, Any, Union, cast +from typing import Literal, Optional, Any, Union, cast, AsyncGenerator class WorkflowClient: @@ -200,14 +195,41 @@ async def run_workflow_nowait( else: raise ValueError("Response was not properly generated") - async def get_workflow_events(self, handler: Handler) -> dict: - response = await get_events_handler_id( - handler_id=handler.handler_id, client=self._client, sse=False - ) - if isinstance(response, GetEventsHandlerIdResponse200): - return response.to_dict() - else: - raise ValueError("Response was not properly generated") + async def get_workflow_events( + self, + handler: Handler, + ) -> AsyncGenerator[dict[str, Any], None]: + """ + Stream events using newline-delimited JSON format + """ + url = f"/events/{handler.handler_id}?sse=false" + client = self._client.get_async_httpx_client() + try: + async with client.stream("GET", url) as response: + # Handle different response codes + if response.status_code == 404: + raise ValueError("Handler not found") + elif response.status_code == 204: + # Handler completed, no more events + return + + response.raise_for_status() + + async for line in response.aiter_lines(): + if line.strip(): # Skip empty lines + try: + event = json.loads(line.replace("\n", "")) + yield event + except json.JSONDecodeError as e: + print(f"Failed to parse JSON: {e}, data: {line}") + continue + + except httpx.TimeoutException: + raise TimeoutError( + f"Timeout waiting for events from handler {handler.handler_id}" + ) + except httpx.RequestError as e: + raise ConnectionError(f"Failed to connect to event stream: {e}") async def get_workflow_handlers(self) -> list[Handler]: response = await get_handlers(client=self._client) diff --git a/src/workflows/server/server.py b/src/workflows/server/server.py index 15f8e4d..f07a0f2 100644 --- a/src/workflows/server/server.py +++ b/src/workflows/server/server.py @@ -602,9 +602,6 @@ async def 
_get_workflow_result(self, request: Request) -> JSONResponse: result = await handler self._results[handler_id] = result - if isinstance(result, StopEvent): - result = result.model_dump() - return JSONResponse(wrapper.to_dict()) except Exception as e: raise HTTPException( @@ -977,7 +974,9 @@ def to_dict(self) -> HandlerDict: if self.completed_at is not None else None, error=self.error, - result=self.result, + result=self.result.model_dump() + if isinstance(self.result, StopEvent) + else self.result, ) @property From beecc820a367d3cb6fd57f33ed809c2e19eb8ed7 Mon Sep 17 00:00:00 2001 From: "Clelia (Astra) Bertelli" Date: Sat, 27 Sep 2025 22:23:08 +0200 Subject: [PATCH 04/13] Revert "chore: implement events streaming the good ol way" This reverts commit 8bb81c88b2e1524cc29b47f7b12eb2e11b8d5db0. --- examples/client/client.py | 6 ++-- examples/client/server.py | 5 ++-- src/workflows/client/client.py | 52 ++++++++++------------------------ src/workflows/server/server.py | 7 +++-- 4 files changed, 24 insertions(+), 46 deletions(-) diff --git a/examples/client/client.py b/examples/client/client.py index 9614239..1c91abd 100644 --- a/examples/client/client.py +++ b/examples/client/client.py @@ -32,9 +32,9 @@ async def main() -> None: ) print("==== STARTING THE WORKFLOW ===") print(f"Workflow running with handler: {handler}") - print("=== STREAMING EVENTS ===") - async for event in client.get_workflow_events(handler): - print("Received data:", event) + # print("=== STREAMING EVENTS ===") + # events = await client.get_workflow_events(handler) + # print(events) # Poll for result result = handler.status.value while result == "running": diff --git a/examples/client/server.py b/examples/client/server.py index df4d24e..dbb578b 100644 --- a/examples/client/server.py +++ b/examples/client/server.py @@ -1,5 +1,5 @@ from workflows import Workflow, step, Context -from workflows.events import StartEvent, StopEvent, Event +from workflows.events import StartEvent, StopEvent, InputRequiredEvent from pydantic import Field from workflows.server import WorkflowServer @@ -12,7 +12,7 @@ class InputNumbers(StartEvent): operation: Literal["sum", "subtraction"] = Field(default="sum") -class CalculationEvent(Event): +class CalculationEvent(InputRequiredEvent): result: int @@ -46,7 +46,6 @@ async def second_step(self, ev: CalculationEvent, ctx: Context) -> OutputEvent: async def main() -> None: server = WorkflowServer() server.add_workflow("add_or_subtract", AddOrSubtractWorkflow(timeout=1000)) - server.add_workflow("add_or_subtract_2", AddOrSubtractWorkflow(timeout=1000)) try: await server.serve("localhost", 8000) except KeyboardInterrupt: diff --git a/src/workflows/client/client.py b/src/workflows/client/client.py index f90bc3b..5486283 100644 --- a/src/workflows/client/client.py +++ b/src/workflows/client/client.py @@ -1,7 +1,6 @@ # TOP-LEVEL import time import json -import httpx # GENERATED CLASSES (CLIENTS) from workflows.openapi_generated_client.workflows_api_client import ( @@ -22,6 +21,9 @@ from workflows.openapi_generated_client.workflows_api_client.api.default.get_results_handler_id import ( asyncio as get_results_handler_id, ) +from workflows.openapi_generated_client.workflows_api_client.api.default.get_events_handler_id import ( + asyncio as get_events_handler_id, +) from workflows.openapi_generated_client.workflows_api_client.api.default.post_workflows_name_run import ( asyncio as post_workflows_name_run, ) @@ -57,6 +59,9 @@ from 
workflows.openapi_generated_client.workflows_api_client.models.post_workflows_name_run_nowait_body_start_event import ( PostWorkflowsNameRunNowaitBodyStartEvent, ) +from workflows.openapi_generated_client.workflows_api_client.models.get_events_handler_id_response_200 import ( + GetEventsHandlerIdResponse200, +) from workflows.openapi_generated_client.workflows_api_client.models.post_events_handler_id_body import ( PostEventsHandlerIdBody, ) @@ -76,7 +81,7 @@ from workflows.events import StartEvent, Event from workflows.context.serializers import JsonSerializer from .utils import AuthDetails, EventDict -from typing import Literal, Optional, Any, Union, cast, AsyncGenerator +from typing import Literal, Optional, Any, Union, cast class WorkflowClient: @@ -195,41 +200,14 @@ async def run_workflow_nowait( else: raise ValueError("Response was not properly generated") - async def get_workflow_events( - self, - handler: Handler, - ) -> AsyncGenerator[dict[str, Any], None]: - """ - Stream events using newline-delimited JSON format - """ - url = f"/events/{handler.handler_id}?sse=false" - client = self._client.get_async_httpx_client() - try: - async with client.stream("GET", url) as response: - # Handle different response codes - if response.status_code == 404: - raise ValueError("Handler not found") - elif response.status_code == 204: - # Handler completed, no more events - return - - response.raise_for_status() - - async for line in response.aiter_lines(): - if line.strip(): # Skip empty lines - try: - event = json.loads(line.replace("\n", "")) - yield event - except json.JSONDecodeError as e: - print(f"Failed to parse JSON: {e}, data: {line}") - continue - - except httpx.TimeoutException: - raise TimeoutError( - f"Timeout waiting for events from handler {handler.handler_id}" - ) - except httpx.RequestError as e: - raise ConnectionError(f"Failed to connect to event stream: {e}") + async def get_workflow_events(self, handler: Handler) -> dict: + response = await get_events_handler_id( + handler_id=handler.handler_id, client=self._client, sse=False + ) + if isinstance(response, GetEventsHandlerIdResponse200): + return response.to_dict() + else: + raise ValueError("Response was not properly generated") async def get_workflow_handlers(self) -> list[Handler]: response = await get_handlers(client=self._client) diff --git a/src/workflows/server/server.py b/src/workflows/server/server.py index f07a0f2..15f8e4d 100644 --- a/src/workflows/server/server.py +++ b/src/workflows/server/server.py @@ -602,6 +602,9 @@ async def _get_workflow_result(self, request: Request) -> JSONResponse: result = await handler self._results[handler_id] = result + if isinstance(result, StopEvent): + result = result.model_dump() + return JSONResponse(wrapper.to_dict()) except Exception as e: raise HTTPException( @@ -974,9 +977,7 @@ def to_dict(self) -> HandlerDict: if self.completed_at is not None else None, error=self.error, - result=self.result.model_dump() - if isinstance(self.result, StopEvent) - else self.result, + result=self.result, ) @property From bc97f89ad10fde452787b3948a5f0c1d4a603e68 Mon Sep 17 00:00:00 2001 From: "Clelia (Astra) Bertelli" Date: Sat, 27 Sep 2025 22:23:17 +0200 Subject: [PATCH 05/13] Revert "chore: use openapi as a base for client" This reverts commit b4bc1dfb66c2492ec15f04ec3782f30ce8832c1a. 
--- .gitignore | 3 - .pre-commit-config.yaml | 2 - examples/client/client.py | 54 +- src/workflows/client/client.py | 512 ++++++++++-------- src/workflows/client/utils.py | 20 - .../openapi_generated_client/__init__.py | 0 .../workflows_api_client/__init__.py | 8 - .../workflows_api_client/api/__init__.py | 1 - .../api/default/__init__.py | 1 - .../api/default/get_events_handler_id.py | 216 -------- .../api/default/get_handlers.py | 131 ----- .../api/default/get_health.py | 135 ----- .../api/default/get_results_handler_id.py | 172 ------ .../api/default/get_workflows.py | 135 ----- .../get_workflows_name_representation.py | 167 ------ .../api/default/get_workflows_name_schema.py | 171 ------ .../api/default/post_events_handler_id.py | 193 ------- .../api/default/post_workflows_name_run.py | 201 ------- .../default/post_workflows_name_run_nowait.py | 193 ------- .../workflows_api_client/client.py | 268 --------- .../workflows_api_client/errors.py | 16 - .../workflows_api_client/models/__init__.py | 45 -- .../get_events_handler_id_response_200.py | 74 --- ...et_events_handler_id_response_200_value.py | 44 -- .../models/get_health_response_200.py | 59 -- ...kflows_name_representation_response_200.py | 59 -- .../get_workflows_name_schema_response_200.py | 67 --- .../models/get_workflows_response_200.py | 59 -- .../workflows_api_client/models/handler.py | 197 ------- .../models/handler_status.py | 10 - .../models/handlers_list.py | 73 --- .../models/post_events_handler_id_body.py | 70 --- .../post_events_handler_id_response_200.py | 61 --- ...t_events_handler_id_response_200_status.py | 8 - .../models/post_workflows_name_run_body.py | 109 ---- .../post_workflows_name_run_body_context.py | 44 -- .../post_workflows_name_run_body_kwargs.py | 44 -- ...ost_workflows_name_run_body_start_event.py | 44 -- .../post_workflows_name_run_nowait_body.py | 109 ---- ..._workflows_name_run_nowait_body_context.py | 44 -- ...t_workflows_name_run_nowait_body_kwargs.py | 44 -- ...kflows_name_run_nowait_body_start_event.py | 44 -- .../workflows_api_client/py.typed | 1 - .../workflows_api_client/types.py | 54 -- src/workflows/server/server.py | 4 + 45 files changed, 340 insertions(+), 3626 deletions(-) delete mode 100644 src/workflows/client/utils.py delete mode 100644 src/workflows/openapi_generated_client/__init__.py delete mode 100644 src/workflows/openapi_generated_client/workflows_api_client/__init__.py delete mode 100644 src/workflows/openapi_generated_client/workflows_api_client/api/__init__.py delete mode 100644 src/workflows/openapi_generated_client/workflows_api_client/api/default/__init__.py delete mode 100644 src/workflows/openapi_generated_client/workflows_api_client/api/default/get_events_handler_id.py delete mode 100644 src/workflows/openapi_generated_client/workflows_api_client/api/default/get_handlers.py delete mode 100644 src/workflows/openapi_generated_client/workflows_api_client/api/default/get_health.py delete mode 100644 src/workflows/openapi_generated_client/workflows_api_client/api/default/get_results_handler_id.py delete mode 100644 src/workflows/openapi_generated_client/workflows_api_client/api/default/get_workflows.py delete mode 100644 src/workflows/openapi_generated_client/workflows_api_client/api/default/get_workflows_name_representation.py delete mode 100644 src/workflows/openapi_generated_client/workflows_api_client/api/default/get_workflows_name_schema.py delete mode 100644 src/workflows/openapi_generated_client/workflows_api_client/api/default/post_events_handler_id.py delete mode 
100644 src/workflows/openapi_generated_client/workflows_api_client/api/default/post_workflows_name_run.py delete mode 100644 src/workflows/openapi_generated_client/workflows_api_client/api/default/post_workflows_name_run_nowait.py delete mode 100644 src/workflows/openapi_generated_client/workflows_api_client/client.py delete mode 100644 src/workflows/openapi_generated_client/workflows_api_client/errors.py delete mode 100644 src/workflows/openapi_generated_client/workflows_api_client/models/__init__.py delete mode 100644 src/workflows/openapi_generated_client/workflows_api_client/models/get_events_handler_id_response_200.py delete mode 100644 src/workflows/openapi_generated_client/workflows_api_client/models/get_events_handler_id_response_200_value.py delete mode 100644 src/workflows/openapi_generated_client/workflows_api_client/models/get_health_response_200.py delete mode 100644 src/workflows/openapi_generated_client/workflows_api_client/models/get_workflows_name_representation_response_200.py delete mode 100644 src/workflows/openapi_generated_client/workflows_api_client/models/get_workflows_name_schema_response_200.py delete mode 100644 src/workflows/openapi_generated_client/workflows_api_client/models/get_workflows_response_200.py delete mode 100644 src/workflows/openapi_generated_client/workflows_api_client/models/handler.py delete mode 100644 src/workflows/openapi_generated_client/workflows_api_client/models/handler_status.py delete mode 100644 src/workflows/openapi_generated_client/workflows_api_client/models/handlers_list.py delete mode 100644 src/workflows/openapi_generated_client/workflows_api_client/models/post_events_handler_id_body.py delete mode 100644 src/workflows/openapi_generated_client/workflows_api_client/models/post_events_handler_id_response_200.py delete mode 100644 src/workflows/openapi_generated_client/workflows_api_client/models/post_events_handler_id_response_200_status.py delete mode 100644 src/workflows/openapi_generated_client/workflows_api_client/models/post_workflows_name_run_body.py delete mode 100644 src/workflows/openapi_generated_client/workflows_api_client/models/post_workflows_name_run_body_context.py delete mode 100644 src/workflows/openapi_generated_client/workflows_api_client/models/post_workflows_name_run_body_kwargs.py delete mode 100644 src/workflows/openapi_generated_client/workflows_api_client/models/post_workflows_name_run_body_start_event.py delete mode 100644 src/workflows/openapi_generated_client/workflows_api_client/models/post_workflows_name_run_nowait_body.py delete mode 100644 src/workflows/openapi_generated_client/workflows_api_client/models/post_workflows_name_run_nowait_body_context.py delete mode 100644 src/workflows/openapi_generated_client/workflows_api_client/models/post_workflows_name_run_nowait_body_kwargs.py delete mode 100644 src/workflows/openapi_generated_client/workflows_api_client/models/post_workflows_name_run_nowait_body_start_event.py delete mode 100644 src/workflows/openapi_generated_client/workflows_api_client/py.typed delete mode 100644 src/workflows/openapi_generated_client/workflows_api_client/types.py diff --git a/.gitignore b/.gitignore index 200334a..8e07f9f 100644 --- a/.gitignore +++ b/.gitignore @@ -147,6 +147,3 @@ cython_debug/ # Generated files openapi.json -src/workflows/openapi_generated_client/README.md -src/workflows/openapi_generated_client/pyproject.toml -src/workflows/openapi_generated_client/.gitignore diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index cc0072f..4eceac4 100644 --- 
a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -62,5 +62,3 @@ repos: rev: v0.23.1 hooks: - id: toml-sort-fix - -exclude: ^(src/workflows/openapi_generated_client/) \ No newline at end of file diff --git a/examples/client/client.py b/examples/client/client.py index 1c91abd..64b93eb 100644 --- a/examples/client/client.py +++ b/examples/client/client.py @@ -2,16 +2,27 @@ from workflows.client.client import WorkflowClient -from workflows.events import StartEvent -from pydantic import Field +from workflows.events import StartEvent, HumanResponseEvent +from pydantic import PrivateAttr, model_validator, Field -from typing import Literal +from typing import Literal, Callable, Self class InputNumbers(StartEvent): a: int b: int operation: Literal["sum", "subtraction"] = Field(default="sum") + _function: Callable[[int, int], int] = PrivateAttr(default=lambda a, b: a + b) + + @model_validator(mode="after") + def assign_function(self) -> Self: + if self.operation == "subtraction": + self._function = lambda a, b: a - b + return self + + +class HumanApprovedResult(HumanResponseEvent): + approved: bool async def main() -> None: @@ -25,29 +36,48 @@ async def main() -> None: ping_time = await client.ping() print("==== PING TIME ====") print(ping_time, "ms") - handler = await client.run_workflow_nowait( + handler_id = await client.run_workflow_nowait( "add_or_subtract", start_event=InputNumbers(a=1, b=3, operation="sum"), context=None, ) print("==== STARTING THE WORKFLOW ===") - print(f"Workflow running with handler: {handler}") - # print("=== STREAMING EVENTS ===") - # events = await client.get_workflow_events(handler) - # print(events) + print(f"Workflow running with handler ID: {handler_id}") + print("=== STREAMING EVENTS ===") + + def handle_event(event_data: dict) -> None: + print(f"Received event: {event_data}") + + # Stream events in background + stream_task = asyncio.create_task( + client.stream_events( + handler_id=handler_id, + event_callback=handle_event, + sse=True, # Use Server-Sent Events + ) + ) + # Poll for result - result = handler.status.value - while result == "running": + result = None + while result is None: try: - result = await client.get_workflow_result(handler) - if result != "running": + result = await client.get_result(handler_id) + if result is not None: break await asyncio.sleep(1) except Exception as e: print(f"Error: {e}") await asyncio.sleep(1) + # Cancel streaming task + stream_task.cancel() + try: + await stream_task + except asyncio.CancelledError: + pass + print(f"Final result: {result}") + return result if __name__ == "__main__": diff --git a/src/workflows/client/client.py b/src/workflows/client/client.py index 5486283..0861754 100644 --- a/src/workflows/client/client.py +++ b/src/workflows/client/client.py @@ -1,144 +1,85 @@ -# TOP-LEVEL +import httpx import time import json +import inspect -# GENERATED CLASSES (CLIENTS) -from workflows.openapi_generated_client.workflows_api_client import ( - Client, - AuthenticatedClient, -) - -# GENERATED FUNCTIONS (API) -from workflows.openapi_generated_client.workflows_api_client.api.default.get_health import ( - asyncio as get_health, -) -from workflows.openapi_generated_client.workflows_api_client.api.default.get_workflows import ( - asyncio as get_workflows, -) -from workflows.openapi_generated_client.workflows_api_client.api.default.get_handlers import ( - asyncio as get_handlers, -) -from workflows.openapi_generated_client.workflows_api_client.api.default.get_results_handler_id import ( - asyncio as 
get_results_handler_id, -) -from workflows.openapi_generated_client.workflows_api_client.api.default.get_events_handler_id import ( - asyncio as get_events_handler_id, -) -from workflows.openapi_generated_client.workflows_api_client.api.default.post_workflows_name_run import ( - asyncio as post_workflows_name_run, -) -from workflows.openapi_generated_client.workflows_api_client.api.default.post_workflows_name_run_nowait import ( - asyncio as post_workflows_name_run_nowait, -) -from workflows.openapi_generated_client.workflows_api_client.api.default.post_events_handler_id import ( - asyncio as post_events_handler_id, -) - -# GENERATED TYPES (API) -from workflows.openapi_generated_client.workflows_api_client.models.post_workflows_name_run_body import ( - PostWorkflowsNameRunBody, -) -from workflows.openapi_generated_client.workflows_api_client.models.post_workflows_name_run_body_context import ( - PostWorkflowsNameRunBodyContext, -) -from workflows.openapi_generated_client.workflows_api_client.models.post_workflows_name_run_body_start_event import ( - PostWorkflowsNameRunBodyStartEvent, -) -from workflows.openapi_generated_client.workflows_api_client.models.post_workflows_name_run_body_kwargs import ( - PostWorkflowsNameRunBodyKwargs, -) -from workflows.openapi_generated_client.workflows_api_client.models.post_workflows_name_run_nowait_body import ( - PostWorkflowsNameRunNowaitBody, -) -from workflows.openapi_generated_client.workflows_api_client.models.post_workflows_name_run_nowait_body_context import ( - PostWorkflowsNameRunNowaitBodyContext, -) -from workflows.openapi_generated_client.workflows_api_client.models.post_workflows_name_run_nowait_body_kwargs import ( - PostWorkflowsNameRunNowaitBodyKwargs, -) -from workflows.openapi_generated_client.workflows_api_client.models.post_workflows_name_run_nowait_body_start_event import ( - PostWorkflowsNameRunNowaitBodyStartEvent, -) -from workflows.openapi_generated_client.workflows_api_client.models.get_events_handler_id_response_200 import ( - GetEventsHandlerIdResponse200, -) -from workflows.openapi_generated_client.workflows_api_client.models.post_events_handler_id_body import ( - PostEventsHandlerIdBody, -) -from workflows.openapi_generated_client.workflows_api_client.models.post_events_handler_id_response_200 import ( - PostEventsHandlerIdResponse200, -) -from workflows.openapi_generated_client.workflows_api_client.models.handler import ( - Handler, -) -from workflows.openapi_generated_client.workflows_api_client.models.handler_status import ( - HandlerStatus, -) -from workflows.openapi_generated_client.workflows_api_client.types import Unset, UNSET - -# MISC -from workflows import Context +from typing import Literal, Any, Union, Callable, AsyncGenerator, AsyncIterator +from contextlib import asynccontextmanager +from logging import getLogger from workflows.events import StartEvent, Event -from workflows.context.serializers import JsonSerializer -from .utils import AuthDetails, EventDict -from typing import Literal, Optional, Any, Union, cast +from workflows import Context + + +logger = getLogger(__name__) class WorkflowClient: def __init__( self, - protocol: Literal["http", "https"] = "http", - host: str = "localhost", - port: int = 80, - auth_details: Optional[AuthDetails] = None, - raise_on_unexpected_status: bool = True, - **kwargs: Any, - ) -> None: - self.base_url = f"{protocol}://{host}:{port}" - if auth_details: - self._client = AuthenticatedClient( - base_url=self.base_url, - token=auth_details.token, - prefix=auth_details.prefix, 
-                auth_header_name=auth_details.auth_header_name,
-                raise_on_unexpected_status=raise_on_unexpected_status,
-                cookies=kwargs.get("cookies", {}),
-                headers=kwargs.get("headers", {}),
-                timeout=kwargs.get("timeout", None),
-                verify_ssl=kwargs.get("verify_ssl", True),
-                follow_redirects=kwargs.get("follow_redirects", False),
-                httpx_args=kwargs.get("httpx_args", {}),
-            )
-        else:
-            self._client = Client(
-                base_url=self.base_url,
-                raise_on_unexpected_status=raise_on_unexpected_status,
-                cookies=kwargs.get("cookies", {}),
-                headers=kwargs.get("headers", {}),
-                timeout=kwargs.get("timeout", None),
-                verify_ssl=kwargs.get("verify_ssl", True),
-                follow_redirects=kwargs.get("follow_redirects", False),
-                httpx_args=kwargs.get("httpx_args", {}),
-            )
+        protocol: Literal["http", "https"] | None = None,
+        host: str | None = None,
+        port: int | None = None,
+        timeout: int | None = None,
+    ):
+        # TODO: middleware-related logic
+        self.protocol = protocol or "http"
+        self.host = host or "localhost"
+        self.port = port or 8000
+        self.timeout = timeout or 600
+        # TODO: add some basic TLS/verification and auth features
+
+    @asynccontextmanager
+    async def _get_client(self) -> AsyncIterator:
+        async with httpx.AsyncClient(
+            base_url=self.protocol + "://" + self.host + ":" + str(self.port),
+            timeout=self.timeout,
+        ) as client:
+            yield client

     async def is_healthy(self) -> bool:
-        response = await get_health(client=self._client)
-        if not response:
-            return False
-        return True
+        """
+        Check whether the workflow server is healthy or not
+
+        Returns:
+            bool: True if the workflow server is healthy, False if not
+        """
+        async with self._get_client() as client:
+            response = await client.get("/health")
+            if response.status_code == 200:
+                return response.json().get("status", "") == "healthy"
+            return False

     async def ping(self) -> float:
-        start = time.time()
-        response = await get_health(client=self._client)
-        if not response:
-            return -1
-        return (time.time() - start) * 1000
+        """
+        Ping the workflow server and get the latency in milliseconds
+
+        Returns:
+            float: latency in milliseconds
+        """
+        async with self._get_client() as client:
+            start = time.time()
+            response = await client.get("/health")
+            if response.status_code == 200:
+                end = time.time()
+                return (end - start) * 1000
+            else:
+                raise httpx.ConnectError(
+                    f"Failed to establish a connection with server running on: {self.protocol}://{self.host}:{self.port}"
+                )

     async def list_workflows(self) -> list[str]:
-        response = await get_workflows(client=self._client)
-        if not response:
-            return []
-        return response.workflows
+        """
+        List workflows
+
+        Returns:
+            list: List of workflow names available through the server.
+        """
+        async with self._get_client() as client:
+            response = await client.get("/workflows")
+
+            response.raise_for_status()
+
+            return response.json()["workflows"]

     async def run_workflow(
         self,
@@ -146,29 +87,43 @@ async def run_workflow(
         start_event: Union[StartEvent, dict[str, Any], None] = None,
         context: Union[Context, dict[str, Any], None] = None,
         **kwargs: Any,
-    ) -> Handler:
-        if start_event and isinstance(start_event, StartEvent):
-            start_event = start_event.model_dump()
-        if context and isinstance(context, Context):
+    ) -> Any:
+        """
+        Run the workflow and wait until completion.
+
+        Args:
+            start_event (Union[StartEvent, dict[str, Any], None]): start event class or dictionary representation (optional, defaults to None and is passed as an empty dictionary if not provided).
+            context: Context or serialized representation of it (optional, defaults to None if not provided)
+            **kwargs: Any number of keyword arguments that would be passed on as additional keyword arguments to the workflow.
+
+        Returns:
+            Any: Result of the workflow
+        """
+        if isinstance(start_event, StartEvent):
+            try:
+                start_event = start_event.model_dump()
+            except Exception as e:
+                raise ValueError(
+                    f"Impossible to serialize the start event because of: {e}"
+                )
+        if isinstance(context, Context):
             try:
                 context = context.to_dict()
             except Exception as e:
-                raise ValueError(f"Impossible to serialize context because of: {e}")
-        response = await post_workflows_name_run(
-            name=workflow_name,
-            client=self._client,
-            body=PostWorkflowsNameRunBody(
-                start_event=PostWorkflowsNameRunBodyStartEvent.from_dict(
-                    cast(dict, start_event) or {}
-                ),
-                context=PostWorkflowsNameRunBodyContext.from_dict(context or {}),
-                kwargs=PostWorkflowsNameRunBodyKwargs.from_dict(kwargs),
-            ),
-        )
-        if isinstance(response, Handler):
-            return response
-        else:
-            raise ValueError("Response was not properly generated")
+                raise ValueError(f"Impossible to serialize the context because of: {e}")
+        request_body = {
+            "start_event": start_event or {},
+            "context": context or {},
+            "additional_kwargs": kwargs,
+        }
+        async with self._get_client() as client:
+            response = await client.post(
+                f"/workflows/{workflow_name}/run", json=request_body
+            )
+
+            response.raise_for_status()
+
+            return response.json()["result"]

     async def run_workflow_nowait(
         self,
@@ -176,91 +131,212 @@ async def run_workflow_nowait(
         start_event: Union[StartEvent, dict[str, Any], None] = None,
         context: Union[Context, dict[str, Any], None] = None,
         **kwargs: Any,
-    ) -> Handler:
-        if start_event and isinstance(start_event, StartEvent):
-            start_event = start_event.model_dump()
-        if context and isinstance(context, Context):
+    ) -> str:
+        """
+        Run the workflow in the background.
+
+        Args:
+            start_event (Union[StartEvent, dict[str, Any], None]): start event class or dictionary representation (optional, defaults to None and is passed as an empty dictionary if not provided).
+            context: Context or serialized representation of it (optional, defaults to None if not provided)
+            **kwargs: Any number of keyword arguments that would be passed on as additional keyword arguments to the workflow.
+ + Returns: + str: ID of the handler running the workflow + """ + if isinstance(start_event, StartEvent): + try: + start_event = start_event.model_dump() + except Exception as e: + raise ValueError( + f"Impossible to serialize the start event because of: {e}" + ) + if isinstance(context, Context): try: context = context.to_dict() except Exception as e: - raise ValueError(f"Impossible to serialize context because of: {e}") - response = await post_workflows_name_run_nowait( - name=workflow_name, - client=self._client, - body=PostWorkflowsNameRunNowaitBody( - start_event=PostWorkflowsNameRunNowaitBodyStartEvent.from_dict( - cast(dict, start_event) or {} - ), - context=PostWorkflowsNameRunNowaitBodyContext.from_dict(context or {}), - kwargs=PostWorkflowsNameRunNowaitBodyKwargs.from_dict(kwargs), - ), - ) - if isinstance(response, Handler): - return response - else: - raise ValueError("Response was not properly generated") + raise ValueError(f"Impossible to serialize the context because of: {e}") + request_body = { + "start_event": start_event or {}, + "context": context or {}, + "additional_kwargs": kwargs, + } + async with self._get_client() as client: + response = await client.post( + f"/workflows/{workflow_name}/run-nowait", json=request_body + ) - async def get_workflow_events(self, handler: Handler) -> dict: - response = await get_events_handler_id( - handler_id=handler.handler_id, client=self._client, sse=False - ) - if isinstance(response, GetEventsHandlerIdResponse200): - return response.to_dict() - else: - raise ValueError("Response was not properly generated") + response.raise_for_status() - async def get_workflow_handlers(self) -> list[Handler]: - response = await get_handlers(client=self._client) - if response: - return response.handlers - else: - raise ValueError("Response was not properly generated") + return response.json()["handler_id"] + + async def _stream_events_sse( + self, + handler_id: str, + ) -> AsyncGenerator[dict[str, Any], None]: + """ + Stream events using Server-Sent Events format + """ + url = f"/events/{handler_id}?sse=true" + + async with self._get_client() as client: + try: + async with client.stream( + "GET", + url, + ) as response: + # Handle different response codes + if response.status_code == 404: + raise ValueError("Handler not found") + elif response.status_code == 204: + # Handler completed, no more events + return # type: ignore + + response.raise_for_status() + + async for line in response.aiter_lines(): + if line.startswith("data: "): + # Extract JSON from SSE data line + json_data = line[6:] # Remove 'data: ' prefix + if json_data.strip(): # Skip empty data lines + try: + event = json.loads(json_data.replace("\n", "")) + yield event.get("value", {}) + except json.JSONDecodeError as e: + print( + f"Failed to parse JSON: {e}, data: {json_data}" + ) + continue + + except httpx.TimeoutException: + raise TimeoutError( + f"Timeout waiting for events from handler {handler_id}" + ) + except httpx.RequestError as e: + raise ConnectionError(f"Failed to connect to event stream: {e}") + + async def _stream_events_ndjson( + self, + handler_id: str, + ) -> AsyncGenerator[dict[str, Any], None]: + """ + Stream events using newline-delimited JSON format + """ + url = f"/events/{handler_id}?sse=false" + + async with self._get_client() as client: + try: + async with client.stream("GET", url) as response: + # Handle different response codes + if response.status_code == 404: + raise ValueError("Handler not found") + elif response.status_code == 204: + # Handler 
completed, no more events + return + + response.raise_for_status() + + async for line in response.aiter_lines(): + if line.strip(): # Skip empty lines + try: + event = json.loads(line.replace("\n", "")) + yield event.get("value", {}) + except json.JSONDecodeError as e: + print(f"Failed to parse JSON: {e}, data: {line}") + continue + + except httpx.TimeoutException: + raise TimeoutError( + f"Timeout waiting for events from handler {handler_id}" + ) + except httpx.RequestError as e: + raise ConnectionError(f"Failed to connect to event stream: {e}") + + async def stream_events( + self, + handler_id: str, + event_callback: Callable[[dict[str, Any]], Any] | None = None, + sse: bool = True, + ) -> None: + """ + Stream events from a running handler. + + Args: + handler_id (str): ID of the handler streaming the events + event_callback (Callable[[dict[str, Any]], Any]): Function to call when an event is received from the stream (optional, defaults to None) + sse (bool): Whether to enable server-sent events or not - async def get_workflow_result(self, handler: Handler) -> Any: - response = await get_results_handler_id( - handler_id=handler.handler_id, client=self._client + Returns: + None + """ + callback = event_callback or ( + lambda event: logger.info(f"Processing data: {event}") ) - if isinstance(response, Handler): - if response.status == HandlerStatus.COMPLETED: - return response.result - elif response.status == HandlerStatus.RUNNING: - return response.status.value - else: - return response.error - elif isinstance(response, str): - return response + is_async = inspect.iscoroutinefunction(callback) + if sse: + async for event in self._stream_events_sse(handler_id): + if is_async: + await callback(event) # type: ignore + else: + callback(event) else: - raise ValueError("Response was not properly generated") + async for event in self._stream_events_ndjson(handler_id): + if is_async: + await callback(event) # type: ignore + else: + callback(event) + return None - async def send_workflow_event( + async def send_event( self, - handler: Handler, - event: Union[Event, EventDict, str], - step: Optional[str] = None, - ) -> str: + handler_id: str, + event: Event | dict[str, Any] | str, + step: str | None = None, + ) -> bool: + """ + Send an event to the workflow. + + Args: + handler_id (str): ID of the handler of the running workflow to send the event to + event (Event | dict[str, Any] | str): Event to send, represented as an Event object, a dictionary or a serialized string. 
+ step (str | None): Step to send the event to (optional, defaults to None) + + Returns: + bool: Success status of the send operation + """ if isinstance(event, Event): try: - event = JsonSerializer().serialize(event) + event = event.model_dump_json() except Exception as e: - raise ValueError( - f"It was not possible to serialize the event you want to send because of: {e}" - ) - elif event is EventDict: - event.setdefault("__is_pydantic", True) + raise ValueError(f"Error while serializing the provided event: {e}") + elif isinstance(event, dict): try: event = json.dumps(event) except Exception as e: - raise ValueError( - f"It was not possible to serialize the event you want to send because of: {e}" - ) - if not step: - step: Unset = UNSET - response = await post_events_handler_id( - handler_id=handler.handler_id, - client=self._client, - body=PostEventsHandlerIdBody(event=cast(str, event), step=step), - ) - if isinstance(response, PostEventsHandlerIdResponse200): - return response.status.value - else: - raise ValueError("Response was not properly generated") + raise ValueError(f"Error while serializing the provided event: {e}") + request_body = {"event": event} + if step: + request_body.update({"step": step}) + async with self._get_client() as client: + response = await client.post(f"/events/{handler_id}", json=request_body) + response.raise_for_status() + + return response.json()["status"] == "sent" + + async def get_result(self, handler_id: str) -> Any: + """ + Get the result of the workflow associated with the specified handler ID. + + Args: + handler_id (str): ID of the handler running the workflow + + Returns: + Any: Result of the workflow + """ + async with self._get_client() as client: + response = await client.get(f"/results/{handler_id}") + response.raise_for_status() + + if response.status_code == 202: + return + + return response.json()["result"] diff --git a/src/workflows/client/utils.py b/src/workflows/client/utils.py deleted file mode 100644 index 983d575..0000000 --- a/src/workflows/client/utils.py +++ /dev/null @@ -1,20 +0,0 @@ -from pydantic import BaseModel, Field -from typing import TypedDict, Any, NotRequired - - -class AuthDetails(BaseModel): - token: str = Field(description="Authentication token") - prefix: str = Field( - description="Prefix in the authentication header (defaults to `Bearer`)", - default="Bearer", - ) - auth_header_name: str = Field( - description="Authentication header name (defaults to `Authentication`)", - default="Authentication", - ) - - -class EventDict(TypedDict): - __is_pydantic: NotRequired[bool] - qualified_name: str - value: dict[str, Any] diff --git a/src/workflows/openapi_generated_client/__init__.py b/src/workflows/openapi_generated_client/__init__.py deleted file mode 100644 index e69de29..0000000 diff --git a/src/workflows/openapi_generated_client/workflows_api_client/__init__.py b/src/workflows/openapi_generated_client/workflows_api_client/__init__.py deleted file mode 100644 index 62e122c..0000000 --- a/src/workflows/openapi_generated_client/workflows_api_client/__init__.py +++ /dev/null @@ -1,8 +0,0 @@ -"""A client library for accessing Workflows API""" - -from .client import AuthenticatedClient, Client - -__all__ = ( - "AuthenticatedClient", - "Client", -) diff --git a/src/workflows/openapi_generated_client/workflows_api_client/api/__init__.py b/src/workflows/openapi_generated_client/workflows_api_client/api/__init__.py deleted file mode 100644 index 81f9fa2..0000000 --- 
a/src/workflows/openapi_generated_client/workflows_api_client/api/__init__.py +++ /dev/null @@ -1 +0,0 @@ -"""Contains methods for accessing the API""" diff --git a/src/workflows/openapi_generated_client/workflows_api_client/api/default/__init__.py b/src/workflows/openapi_generated_client/workflows_api_client/api/default/__init__.py deleted file mode 100644 index 2d7c0b2..0000000 --- a/src/workflows/openapi_generated_client/workflows_api_client/api/default/__init__.py +++ /dev/null @@ -1 +0,0 @@ -"""Contains endpoint functions for accessing the API""" diff --git a/src/workflows/openapi_generated_client/workflows_api_client/api/default/get_events_handler_id.py b/src/workflows/openapi_generated_client/workflows_api_client/api/default/get_events_handler_id.py deleted file mode 100644 index e9509da..0000000 --- a/src/workflows/openapi_generated_client/workflows_api_client/api/default/get_events_handler_id.py +++ /dev/null @@ -1,216 +0,0 @@ -from http import HTTPStatus -from typing import Any, Optional, Union, cast - -import httpx - -from ... import errors -from ...client import AuthenticatedClient, Client -from ...models.get_events_handler_id_response_200 import GetEventsHandlerIdResponse200 -from ...types import UNSET, Response, Unset - - -def _get_kwargs( - handler_id: str, - *, - sse: Union[Unset, bool] = True, -) -> dict[str, Any]: - params: dict[str, Any] = {} - - params["sse"] = sse - - params = {k: v for k, v in params.items() if v is not UNSET and v is not None} - - _kwargs: dict[str, Any] = { - "method": "get", - "url": f"/events/{handler_id}", - "params": params, - } - - return _kwargs - - -def _parse_response( - *, client: Union[AuthenticatedClient, Client], response: httpx.Response -) -> Optional[Union[Any, GetEventsHandlerIdResponse200]]: - if response.status_code == 200: - response_200 = GetEventsHandlerIdResponse200.from_dict(response.text) - - return response_200 - - if response.status_code == 404: - response_404 = cast(Any, None) - return response_404 - - if client.raise_on_unexpected_status: - raise errors.UnexpectedStatus(response.status_code, response.content) - else: - return None - - -def _build_response( - *, client: Union[AuthenticatedClient, Client], response: httpx.Response -) -> Response[Union[Any, GetEventsHandlerIdResponse200]]: - return Response( - status_code=HTTPStatus(response.status_code), - content=response.content, - headers=response.headers, - parsed=_parse_response(client=client, response=response), - ) - - -def sync_detailed( - handler_id: str, - *, - client: Union[AuthenticatedClient, Client], - sse: Union[Unset, bool] = True, -) -> Response[Union[Any, GetEventsHandlerIdResponse200]]: - r"""Stream workflow events - - Streams events produced by a workflow execution. Events are emitted as - newline-delimited JSON by default, or as Server-Sent Events when `sse=true`. - Event data is formatted according to llama-index's json serializer. For - pydantic serializable python types, it returns: - { - \"__is_pydantic\": True, - \"value\": , - \"qualified_name\": - } - - Args: - handler_id (str): - sse (Union[Unset, bool]): Default: True. - - Raises: - errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. - httpx.TimeoutException: If the request takes longer than Client.timeout. 
- - Returns: - Response[Union[Any, GetEventsHandlerIdResponse200]] - """ - - kwargs = _get_kwargs( - handler_id=handler_id, - sse=sse, - ) - - response = client.get_httpx_client().request( - **kwargs, - ) - - return _build_response(client=client, response=response) - - -def sync( - handler_id: str, - *, - client: Union[AuthenticatedClient, Client], - sse: Union[Unset, bool] = True, -) -> Optional[Union[Any, GetEventsHandlerIdResponse200]]: - r"""Stream workflow events - - Streams events produced by a workflow execution. Events are emitted as - newline-delimited JSON by default, or as Server-Sent Events when `sse=true`. - Event data is formatted according to llama-index's json serializer. For - pydantic serializable python types, it returns: - { - \"__is_pydantic\": True, - \"value\": , - \"qualified_name\": - } - - Args: - handler_id (str): - sse (Union[Unset, bool]): Default: True. - - Raises: - errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. - httpx.TimeoutException: If the request takes longer than Client.timeout. - - Returns: - Union[Any, GetEventsHandlerIdResponse200] - """ - - return sync_detailed( - handler_id=handler_id, - client=client, - sse=sse, - ).parsed - - -async def asyncio_detailed( - handler_id: str, - *, - client: Union[AuthenticatedClient, Client], - sse: Union[Unset, bool] = True, -) -> Response[Union[Any, GetEventsHandlerIdResponse200]]: - r"""Stream workflow events - - Streams events produced by a workflow execution. Events are emitted as - newline-delimited JSON by default, or as Server-Sent Events when `sse=true`. - Event data is formatted according to llama-index's json serializer. For - pydantic serializable python types, it returns: - { - \"__is_pydantic\": True, - \"value\": , - \"qualified_name\": - } - - Args: - handler_id (str): - sse (Union[Unset, bool]): Default: True. - - Raises: - errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. - httpx.TimeoutException: If the request takes longer than Client.timeout. - - Returns: - Response[Union[Any, GetEventsHandlerIdResponse200]] - """ - - kwargs = _get_kwargs( - handler_id=handler_id, - sse=sse, - ) - - response = await client.get_async_httpx_client().request(**kwargs) - - return _build_response(client=client, response=response) - - -async def asyncio( - handler_id: str, - *, - client: Union[AuthenticatedClient, Client], - sse: Union[Unset, bool] = True, -) -> Optional[Union[Any, GetEventsHandlerIdResponse200]]: - r"""Stream workflow events - - Streams events produced by a workflow execution. Events are emitted as - newline-delimited JSON by default, or as Server-Sent Events when `sse=true`. - Event data is formatted according to llama-index's json serializer. For - pydantic serializable python types, it returns: - { - \"__is_pydantic\": True, - \"value\": , - \"qualified_name\": - } - - Args: - handler_id (str): - sse (Union[Unset, bool]): Default: True. - - Raises: - errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. - httpx.TimeoutException: If the request takes longer than Client.timeout. 
- - Returns: - Union[Any, GetEventsHandlerIdResponse200] - """ - - return ( - await asyncio_detailed( - handler_id=handler_id, - client=client, - sse=sse, - ) - ).parsed diff --git a/src/workflows/openapi_generated_client/workflows_api_client/api/default/get_handlers.py b/src/workflows/openapi_generated_client/workflows_api_client/api/default/get_handlers.py deleted file mode 100644 index 2f3491b..0000000 --- a/src/workflows/openapi_generated_client/workflows_api_client/api/default/get_handlers.py +++ /dev/null @@ -1,131 +0,0 @@ -from http import HTTPStatus -from typing import Any, Optional, Union - -import httpx - -from ... import errors -from ...client import AuthenticatedClient, Client -from ...models.handlers_list import HandlersList -from ...types import Response - - -def _get_kwargs() -> dict[str, Any]: - _kwargs: dict[str, Any] = { - "method": "get", - "url": "/handlers", - } - - return _kwargs - - -def _parse_response(*, client: Union[AuthenticatedClient, Client], response: httpx.Response) -> Optional[HandlersList]: - if response.status_code == 200: - response_200 = HandlersList.from_dict(response.json()) - - return response_200 - - if client.raise_on_unexpected_status: - raise errors.UnexpectedStatus(response.status_code, response.content) - else: - return None - - -def _build_response(*, client: Union[AuthenticatedClient, Client], response: httpx.Response) -> Response[HandlersList]: - return Response( - status_code=HTTPStatus(response.status_code), - content=response.content, - headers=response.headers, - parsed=_parse_response(client=client, response=response), - ) - - -def sync_detailed( - *, - client: Union[AuthenticatedClient, Client], -) -> Response[HandlersList]: - """Get handlers - - Returns all workflow handlers. - - Raises: - errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. - httpx.TimeoutException: If the request takes longer than Client.timeout. - - Returns: - Response[HandlersList] - """ - - kwargs = _get_kwargs() - - response = client.get_httpx_client().request( - **kwargs, - ) - - return _build_response(client=client, response=response) - - -def sync( - *, - client: Union[AuthenticatedClient, Client], -) -> Optional[HandlersList]: - """Get handlers - - Returns all workflow handlers. - - Raises: - errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. - httpx.TimeoutException: If the request takes longer than Client.timeout. - - Returns: - HandlersList - """ - - return sync_detailed( - client=client, - ).parsed - - -async def asyncio_detailed( - *, - client: Union[AuthenticatedClient, Client], -) -> Response[HandlersList]: - """Get handlers - - Returns all workflow handlers. - - Raises: - errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. - httpx.TimeoutException: If the request takes longer than Client.timeout. - - Returns: - Response[HandlersList] - """ - - kwargs = _get_kwargs() - - response = await client.get_async_httpx_client().request(**kwargs) - - return _build_response(client=client, response=response) - - -async def asyncio( - *, - client: Union[AuthenticatedClient, Client], -) -> Optional[HandlersList]: - """Get handlers - - Returns all workflow handlers. - - Raises: - errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. 
- httpx.TimeoutException: If the request takes longer than Client.timeout. - - Returns: - HandlersList - """ - - return ( - await asyncio_detailed( - client=client, - ) - ).parsed diff --git a/src/workflows/openapi_generated_client/workflows_api_client/api/default/get_health.py b/src/workflows/openapi_generated_client/workflows_api_client/api/default/get_health.py deleted file mode 100644 index fabfb32..0000000 --- a/src/workflows/openapi_generated_client/workflows_api_client/api/default/get_health.py +++ /dev/null @@ -1,135 +0,0 @@ -from http import HTTPStatus -from typing import Any, Optional, Union - -import httpx - -from ... import errors -from ...client import AuthenticatedClient, Client -from ...models.get_health_response_200 import GetHealthResponse200 -from ...types import Response - - -def _get_kwargs() -> dict[str, Any]: - _kwargs: dict[str, Any] = { - "method": "get", - "url": "/health", - } - - return _kwargs - - -def _parse_response( - *, client: Union[AuthenticatedClient, Client], response: httpx.Response -) -> Optional[GetHealthResponse200]: - if response.status_code == 200: - response_200 = GetHealthResponse200.from_dict(response.json()) - - return response_200 - - if client.raise_on_unexpected_status: - raise errors.UnexpectedStatus(response.status_code, response.content) - else: - return None - - -def _build_response( - *, client: Union[AuthenticatedClient, Client], response: httpx.Response -) -> Response[GetHealthResponse200]: - return Response( - status_code=HTTPStatus(response.status_code), - content=response.content, - headers=response.headers, - parsed=_parse_response(client=client, response=response), - ) - - -def sync_detailed( - *, - client: Union[AuthenticatedClient, Client], -) -> Response[GetHealthResponse200]: - """Health check - - Returns the server health status. - - Raises: - errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. - httpx.TimeoutException: If the request takes longer than Client.timeout. - - Returns: - Response[GetHealthResponse200] - """ - - kwargs = _get_kwargs() - - response = client.get_httpx_client().request( - **kwargs, - ) - - return _build_response(client=client, response=response) - - -def sync( - *, - client: Union[AuthenticatedClient, Client], -) -> Optional[GetHealthResponse200]: - """Health check - - Returns the server health status. - - Raises: - errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. - httpx.TimeoutException: If the request takes longer than Client.timeout. - - Returns: - GetHealthResponse200 - """ - - return sync_detailed( - client=client, - ).parsed - - -async def asyncio_detailed( - *, - client: Union[AuthenticatedClient, Client], -) -> Response[GetHealthResponse200]: - """Health check - - Returns the server health status. - - Raises: - errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. - httpx.TimeoutException: If the request takes longer than Client.timeout. - - Returns: - Response[GetHealthResponse200] - """ - - kwargs = _get_kwargs() - - response = await client.get_async_httpx_client().request(**kwargs) - - return _build_response(client=client, response=response) - - -async def asyncio( - *, - client: Union[AuthenticatedClient, Client], -) -> Optional[GetHealthResponse200]: - """Health check - - Returns the server health status. 
- - Raises: - errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. - httpx.TimeoutException: If the request takes longer than Client.timeout. - - Returns: - GetHealthResponse200 - """ - - return ( - await asyncio_detailed( - client=client, - ) - ).parsed diff --git a/src/workflows/openapi_generated_client/workflows_api_client/api/default/get_results_handler_id.py b/src/workflows/openapi_generated_client/workflows_api_client/api/default/get_results_handler_id.py deleted file mode 100644 index b1f9bff..0000000 --- a/src/workflows/openapi_generated_client/workflows_api_client/api/default/get_results_handler_id.py +++ /dev/null @@ -1,172 +0,0 @@ -from http import HTTPStatus -from typing import Any, Optional, Union, cast - -import httpx - -from ... import errors -from ...client import AuthenticatedClient, Client -from ...models.handler import Handler -from ...types import Response - - -def _get_kwargs( - handler_id: str, -) -> dict[str, Any]: - _kwargs: dict[str, Any] = { - "method": "get", - "url": f"/results/{handler_id}", - } - - return _kwargs - - -def _parse_response( - *, client: Union[AuthenticatedClient, Client], response: httpx.Response -) -> Optional[Union[Any, Handler, str]]: - if response.status_code == 200: - response_200 = Handler.from_dict(response.json()) - - return response_200 - - if response.status_code == 202: - response_202 = Handler.from_dict(response.json()) - - return response_202 - - if response.status_code == 404: - response_404 = cast(Any, None) - return response_404 - - if response.status_code == 500: - response_500 = response.text - return response_500 - - if client.raise_on_unexpected_status: - raise errors.UnexpectedStatus(response.status_code, response.content) - else: - return None - - -def _build_response( - *, client: Union[AuthenticatedClient, Client], response: httpx.Response -) -> Response[Union[Any, Handler, str]]: - return Response( - status_code=HTTPStatus(response.status_code), - content=response.content, - headers=response.headers, - parsed=_parse_response(client=client, response=response), - ) - - -def sync_detailed( - handler_id: str, - *, - client: Union[AuthenticatedClient, Client], -) -> Response[Union[Any, Handler, str]]: - """Get workflow result - - Returns the final result of an asynchronously started workflow, if available - - Args: - handler_id (str): - - Raises: - errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. - httpx.TimeoutException: If the request takes longer than Client.timeout. - - Returns: - Response[Union[Any, Handler, str]] - """ - - kwargs = _get_kwargs( - handler_id=handler_id, - ) - - response = client.get_httpx_client().request( - **kwargs, - ) - - return _build_response(client=client, response=response) - - -def sync( - handler_id: str, - *, - client: Union[AuthenticatedClient, Client], -) -> Optional[Union[Any, Handler, str]]: - """Get workflow result - - Returns the final result of an asynchronously started workflow, if available - - Args: - handler_id (str): - - Raises: - errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. - httpx.TimeoutException: If the request takes longer than Client.timeout. 
- - Returns: - Union[Any, Handler, str] - """ - - return sync_detailed( - handler_id=handler_id, - client=client, - ).parsed - - -async def asyncio_detailed( - handler_id: str, - *, - client: Union[AuthenticatedClient, Client], -) -> Response[Union[Any, Handler, str]]: - """Get workflow result - - Returns the final result of an asynchronously started workflow, if available - - Args: - handler_id (str): - - Raises: - errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. - httpx.TimeoutException: If the request takes longer than Client.timeout. - - Returns: - Response[Union[Any, Handler, str]] - """ - - kwargs = _get_kwargs( - handler_id=handler_id, - ) - - response = await client.get_async_httpx_client().request(**kwargs) - - return _build_response(client=client, response=response) - - -async def asyncio( - handler_id: str, - *, - client: Union[AuthenticatedClient, Client], -) -> Optional[Union[Any, Handler, str]]: - """Get workflow result - - Returns the final result of an asynchronously started workflow, if available - - Args: - handler_id (str): - - Raises: - errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. - httpx.TimeoutException: If the request takes longer than Client.timeout. - - Returns: - Union[Any, Handler, str] - """ - - return ( - await asyncio_detailed( - handler_id=handler_id, - client=client, - ) - ).parsed diff --git a/src/workflows/openapi_generated_client/workflows_api_client/api/default/get_workflows.py b/src/workflows/openapi_generated_client/workflows_api_client/api/default/get_workflows.py deleted file mode 100644 index 8d9fd1e..0000000 --- a/src/workflows/openapi_generated_client/workflows_api_client/api/default/get_workflows.py +++ /dev/null @@ -1,135 +0,0 @@ -from http import HTTPStatus -from typing import Any, Optional, Union - -import httpx - -from ... import errors -from ...client import AuthenticatedClient, Client -from ...models.get_workflows_response_200 import GetWorkflowsResponse200 -from ...types import Response - - -def _get_kwargs() -> dict[str, Any]: - _kwargs: dict[str, Any] = { - "method": "get", - "url": "/workflows", - } - - return _kwargs - - -def _parse_response( - *, client: Union[AuthenticatedClient, Client], response: httpx.Response -) -> Optional[GetWorkflowsResponse200]: - if response.status_code == 200: - response_200 = GetWorkflowsResponse200.from_dict(response.json()) - - return response_200 - - if client.raise_on_unexpected_status: - raise errors.UnexpectedStatus(response.status_code, response.content) - else: - return None - - -def _build_response( - *, client: Union[AuthenticatedClient, Client], response: httpx.Response -) -> Response[GetWorkflowsResponse200]: - return Response( - status_code=HTTPStatus(response.status_code), - content=response.content, - headers=response.headers, - parsed=_parse_response(client=client, response=response), - ) - - -def sync_detailed( - *, - client: Union[AuthenticatedClient, Client], -) -> Response[GetWorkflowsResponse200]: - """List workflows - - Returns the list of registered workflow names. - - Raises: - errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. - httpx.TimeoutException: If the request takes longer than Client.timeout. 
- - Returns: - Response[GetWorkflowsResponse200] - """ - - kwargs = _get_kwargs() - - response = client.get_httpx_client().request( - **kwargs, - ) - - return _build_response(client=client, response=response) - - -def sync( - *, - client: Union[AuthenticatedClient, Client], -) -> Optional[GetWorkflowsResponse200]: - """List workflows - - Returns the list of registered workflow names. - - Raises: - errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. - httpx.TimeoutException: If the request takes longer than Client.timeout. - - Returns: - GetWorkflowsResponse200 - """ - - return sync_detailed( - client=client, - ).parsed - - -async def asyncio_detailed( - *, - client: Union[AuthenticatedClient, Client], -) -> Response[GetWorkflowsResponse200]: - """List workflows - - Returns the list of registered workflow names. - - Raises: - errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. - httpx.TimeoutException: If the request takes longer than Client.timeout. - - Returns: - Response[GetWorkflowsResponse200] - """ - - kwargs = _get_kwargs() - - response = await client.get_async_httpx_client().request(**kwargs) - - return _build_response(client=client, response=response) - - -async def asyncio( - *, - client: Union[AuthenticatedClient, Client], -) -> Optional[GetWorkflowsResponse200]: - """List workflows - - Returns the list of registered workflow names. - - Raises: - errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. - httpx.TimeoutException: If the request takes longer than Client.timeout. - - Returns: - GetWorkflowsResponse200 - """ - - return ( - await asyncio_detailed( - client=client, - ) - ).parsed diff --git a/src/workflows/openapi_generated_client/workflows_api_client/api/default/get_workflows_name_representation.py b/src/workflows/openapi_generated_client/workflows_api_client/api/default/get_workflows_name_representation.py deleted file mode 100644 index c8f649f..0000000 --- a/src/workflows/openapi_generated_client/workflows_api_client/api/default/get_workflows_name_representation.py +++ /dev/null @@ -1,167 +0,0 @@ -from http import HTTPStatus -from typing import Any, Optional, Union, cast - -import httpx - -from ... 
import errors -from ...client import AuthenticatedClient, Client -from ...models.get_workflows_name_representation_response_200 import GetWorkflowsNameRepresentationResponse200 -from ...types import Response - - -def _get_kwargs( - name: str, -) -> dict[str, Any]: - _kwargs: dict[str, Any] = { - "method": "get", - "url": f"/workflows/{name}/representation", - } - - return _kwargs - - -def _parse_response( - *, client: Union[AuthenticatedClient, Client], response: httpx.Response -) -> Optional[Union[Any, GetWorkflowsNameRepresentationResponse200]]: - if response.status_code == 200: - response_200 = GetWorkflowsNameRepresentationResponse200.from_dict(response.json()) - - return response_200 - - if response.status_code == 404: - response_404 = cast(Any, None) - return response_404 - - if response.status_code == 500: - response_500 = cast(Any, None) - return response_500 - - if client.raise_on_unexpected_status: - raise errors.UnexpectedStatus(response.status_code, response.content) - else: - return None - - -def _build_response( - *, client: Union[AuthenticatedClient, Client], response: httpx.Response -) -> Response[Union[Any, GetWorkflowsNameRepresentationResponse200]]: - return Response( - status_code=HTTPStatus(response.status_code), - content=response.content, - headers=response.headers, - parsed=_parse_response(client=client, response=response), - ) - - -def sync_detailed( - name: str, - *, - client: Union[AuthenticatedClient, Client], -) -> Response[Union[Any, GetWorkflowsNameRepresentationResponse200]]: - """Get the representation of the workflow - - Get the representation of the workflow as a directed graph in JSON format - - Args: - name (str): - - Raises: - errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. - httpx.TimeoutException: If the request takes longer than Client.timeout. - - Returns: - Response[Union[Any, GetWorkflowsNameRepresentationResponse200]] - """ - - kwargs = _get_kwargs( - name=name, - ) - - response = client.get_httpx_client().request( - **kwargs, - ) - - return _build_response(client=client, response=response) - - -def sync( - name: str, - *, - client: Union[AuthenticatedClient, Client], -) -> Optional[Union[Any, GetWorkflowsNameRepresentationResponse200]]: - """Get the representation of the workflow - - Get the representation of the workflow as a directed graph in JSON format - - Args: - name (str): - - Raises: - errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. - httpx.TimeoutException: If the request takes longer than Client.timeout. - - Returns: - Union[Any, GetWorkflowsNameRepresentationResponse200] - """ - - return sync_detailed( - name=name, - client=client, - ).parsed - - -async def asyncio_detailed( - name: str, - *, - client: Union[AuthenticatedClient, Client], -) -> Response[Union[Any, GetWorkflowsNameRepresentationResponse200]]: - """Get the representation of the workflow - - Get the representation of the workflow as a directed graph in JSON format - - Args: - name (str): - - Raises: - errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. - httpx.TimeoutException: If the request takes longer than Client.timeout. 
- - Returns: - Response[Union[Any, GetWorkflowsNameRepresentationResponse200]] - """ - - kwargs = _get_kwargs( - name=name, - ) - - response = await client.get_async_httpx_client().request(**kwargs) - - return _build_response(client=client, response=response) - - -async def asyncio( - name: str, - *, - client: Union[AuthenticatedClient, Client], -) -> Optional[Union[Any, GetWorkflowsNameRepresentationResponse200]]: - """Get the representation of the workflow - - Get the representation of the workflow as a directed graph in JSON format - - Args: - name (str): - - Raises: - errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. - httpx.TimeoutException: If the request takes longer than Client.timeout. - - Returns: - Union[Any, GetWorkflowsNameRepresentationResponse200] - """ - - return ( - await asyncio_detailed( - name=name, - client=client, - ) - ).parsed diff --git a/src/workflows/openapi_generated_client/workflows_api_client/api/default/get_workflows_name_schema.py b/src/workflows/openapi_generated_client/workflows_api_client/api/default/get_workflows_name_schema.py deleted file mode 100644 index 0cbb6c5..0000000 --- a/src/workflows/openapi_generated_client/workflows_api_client/api/default/get_workflows_name_schema.py +++ /dev/null @@ -1,171 +0,0 @@ -from http import HTTPStatus -from typing import Any, Optional, Union, cast - -import httpx - -from ... import errors -from ...client import AuthenticatedClient, Client -from ...models.get_workflows_name_schema_response_200 import GetWorkflowsNameSchemaResponse200 -from ...types import Response - - -def _get_kwargs( - name: str, -) -> dict[str, Any]: - _kwargs: dict[str, Any] = { - "method": "get", - "url": f"/workflows/{name}/schema", - } - - return _kwargs - - -def _parse_response( - *, client: Union[AuthenticatedClient, Client], response: httpx.Response -) -> Optional[Union[Any, GetWorkflowsNameSchemaResponse200]]: - if response.status_code == 200: - response_200 = GetWorkflowsNameSchemaResponse200.from_dict(response.json()) - - return response_200 - - if response.status_code == 404: - response_404 = cast(Any, None) - return response_404 - - if response.status_code == 500: - response_500 = cast(Any, None) - return response_500 - - if client.raise_on_unexpected_status: - raise errors.UnexpectedStatus(response.status_code, response.content) - else: - return None - - -def _build_response( - *, client: Union[AuthenticatedClient, Client], response: httpx.Response -) -> Response[Union[Any, GetWorkflowsNameSchemaResponse200]]: - return Response( - status_code=HTTPStatus(response.status_code), - content=response.content, - headers=response.headers, - parsed=_parse_response(client=client, response=response), - ) - - -def sync_detailed( - name: str, - *, - client: Union[AuthenticatedClient, Client], -) -> Response[Union[Any, GetWorkflowsNameSchemaResponse200]]: - r"""Get JSON schema for start event - - Gets the JSON schema of the start and stop events from the specified workflow and returns it under - \"start\" (start event) and \"stop\" (stop event) - - Args: - name (str): - - Raises: - errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. - httpx.TimeoutException: If the request takes longer than Client.timeout. 
- - Returns: - Response[Union[Any, GetWorkflowsNameSchemaResponse200]] - """ - - kwargs = _get_kwargs( - name=name, - ) - - response = client.get_httpx_client().request( - **kwargs, - ) - - return _build_response(client=client, response=response) - - -def sync( - name: str, - *, - client: Union[AuthenticatedClient, Client], -) -> Optional[Union[Any, GetWorkflowsNameSchemaResponse200]]: - r"""Get JSON schema for start event - - Gets the JSON schema of the start and stop events from the specified workflow and returns it under - \"start\" (start event) and \"stop\" (stop event) - - Args: - name (str): - - Raises: - errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. - httpx.TimeoutException: If the request takes longer than Client.timeout. - - Returns: - Union[Any, GetWorkflowsNameSchemaResponse200] - """ - - return sync_detailed( - name=name, - client=client, - ).parsed - - -async def asyncio_detailed( - name: str, - *, - client: Union[AuthenticatedClient, Client], -) -> Response[Union[Any, GetWorkflowsNameSchemaResponse200]]: - r"""Get JSON schema for start event - - Gets the JSON schema of the start and stop events from the specified workflow and returns it under - \"start\" (start event) and \"stop\" (stop event) - - Args: - name (str): - - Raises: - errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. - httpx.TimeoutException: If the request takes longer than Client.timeout. - - Returns: - Response[Union[Any, GetWorkflowsNameSchemaResponse200]] - """ - - kwargs = _get_kwargs( - name=name, - ) - - response = await client.get_async_httpx_client().request(**kwargs) - - return _build_response(client=client, response=response) - - -async def asyncio( - name: str, - *, - client: Union[AuthenticatedClient, Client], -) -> Optional[Union[Any, GetWorkflowsNameSchemaResponse200]]: - r"""Get JSON schema for start event - - Gets the JSON schema of the start and stop events from the specified workflow and returns it under - \"start\" (start event) and \"stop\" (stop event) - - Args: - name (str): - - Raises: - errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. - httpx.TimeoutException: If the request takes longer than Client.timeout. - - Returns: - Union[Any, GetWorkflowsNameSchemaResponse200] - """ - - return ( - await asyncio_detailed( - name=name, - client=client, - ) - ).parsed diff --git a/src/workflows/openapi_generated_client/workflows_api_client/api/default/post_events_handler_id.py b/src/workflows/openapi_generated_client/workflows_api_client/api/default/post_events_handler_id.py deleted file mode 100644 index 9e667e1..0000000 --- a/src/workflows/openapi_generated_client/workflows_api_client/api/default/post_events_handler_id.py +++ /dev/null @@ -1,193 +0,0 @@ -from http import HTTPStatus -from typing import Any, Optional, Union, cast - -import httpx - -from ... 
import errors -from ...client import AuthenticatedClient, Client -from ...models.post_events_handler_id_body import PostEventsHandlerIdBody -from ...models.post_events_handler_id_response_200 import PostEventsHandlerIdResponse200 -from ...types import Response - - -def _get_kwargs( - handler_id: str, - *, - body: PostEventsHandlerIdBody, -) -> dict[str, Any]: - headers: dict[str, Any] = {} - - _kwargs: dict[str, Any] = { - "method": "post", - "url": f"/events/{handler_id}", - } - - _kwargs["json"] = body.to_dict() - - headers["Content-Type"] = "application/json" - - _kwargs["headers"] = headers - return _kwargs - - -def _parse_response( - *, client: Union[AuthenticatedClient, Client], response: httpx.Response -) -> Optional[Union[Any, PostEventsHandlerIdResponse200]]: - if response.status_code == 200: - response_200 = PostEventsHandlerIdResponse200.from_dict(response.json()) - - return response_200 - - if response.status_code == 400: - response_400 = cast(Any, None) - return response_400 - - if response.status_code == 404: - response_404 = cast(Any, None) - return response_404 - - if response.status_code == 409: - response_409 = cast(Any, None) - return response_409 - - if client.raise_on_unexpected_status: - raise errors.UnexpectedStatus(response.status_code, response.content) - else: - return None - - -def _build_response( - *, client: Union[AuthenticatedClient, Client], response: httpx.Response -) -> Response[Union[Any, PostEventsHandlerIdResponse200]]: - return Response( - status_code=HTTPStatus(response.status_code), - content=response.content, - headers=response.headers, - parsed=_parse_response(client=client, response=response), - ) - - -def sync_detailed( - handler_id: str, - *, - client: Union[AuthenticatedClient, Client], - body: PostEventsHandlerIdBody, -) -> Response[Union[Any, PostEventsHandlerIdResponse200]]: - """Send event to workflow - - Sends an event to a running workflow's context. - - Args: - handler_id (str): - body (PostEventsHandlerIdBody): - - Raises: - errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. - httpx.TimeoutException: If the request takes longer than Client.timeout. - - Returns: - Response[Union[Any, PostEventsHandlerIdResponse200]] - """ - - kwargs = _get_kwargs( - handler_id=handler_id, - body=body, - ) - - response = client.get_httpx_client().request( - **kwargs, - ) - - return _build_response(client=client, response=response) - - -def sync( - handler_id: str, - *, - client: Union[AuthenticatedClient, Client], - body: PostEventsHandlerIdBody, -) -> Optional[Union[Any, PostEventsHandlerIdResponse200]]: - """Send event to workflow - - Sends an event to a running workflow's context. - - Args: - handler_id (str): - body (PostEventsHandlerIdBody): - - Raises: - errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. - httpx.TimeoutException: If the request takes longer than Client.timeout. - - Returns: - Union[Any, PostEventsHandlerIdResponse200] - """ - - return sync_detailed( - handler_id=handler_id, - client=client, - body=body, - ).parsed - - -async def asyncio_detailed( - handler_id: str, - *, - client: Union[AuthenticatedClient, Client], - body: PostEventsHandlerIdBody, -) -> Response[Union[Any, PostEventsHandlerIdResponse200]]: - """Send event to workflow - - Sends an event to a running workflow's context. 
- - Args: - handler_id (str): - body (PostEventsHandlerIdBody): - - Raises: - errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. - httpx.TimeoutException: If the request takes longer than Client.timeout. - - Returns: - Response[Union[Any, PostEventsHandlerIdResponse200]] - """ - - kwargs = _get_kwargs( - handler_id=handler_id, - body=body, - ) - - response = await client.get_async_httpx_client().request(**kwargs) - - return _build_response(client=client, response=response) - - -async def asyncio( - handler_id: str, - *, - client: Union[AuthenticatedClient, Client], - body: PostEventsHandlerIdBody, -) -> Optional[Union[Any, PostEventsHandlerIdResponse200]]: - """Send event to workflow - - Sends an event to a running workflow's context. - - Args: - handler_id (str): - body (PostEventsHandlerIdBody): - - Raises: - errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. - httpx.TimeoutException: If the request takes longer than Client.timeout. - - Returns: - Union[Any, PostEventsHandlerIdResponse200] - """ - - return ( - await asyncio_detailed( - handler_id=handler_id, - client=client, - body=body, - ) - ).parsed diff --git a/src/workflows/openapi_generated_client/workflows_api_client/api/default/post_workflows_name_run.py b/src/workflows/openapi_generated_client/workflows_api_client/api/default/post_workflows_name_run.py deleted file mode 100644 index 1b2b7cb..0000000 --- a/src/workflows/openapi_generated_client/workflows_api_client/api/default/post_workflows_name_run.py +++ /dev/null @@ -1,201 +0,0 @@ -from http import HTTPStatus -from typing import Any, Optional, Union, cast - -import httpx - -from ... import errors -from ...client import AuthenticatedClient, Client -from ...models.handler import Handler -from ...models.post_workflows_name_run_body import PostWorkflowsNameRunBody -from ...types import Response - - -def _get_kwargs( - name: str, - *, - body: PostWorkflowsNameRunBody, -) -> dict[str, Any]: - headers: dict[str, Any] = {} - - _kwargs: dict[str, Any] = { - "method": "post", - "url": f"/workflows/{name}/run", - } - - _kwargs["json"] = body.to_dict() - - headers["Content-Type"] = "application/json" - - _kwargs["headers"] = headers - return _kwargs - - -def _parse_response( - *, client: Union[AuthenticatedClient, Client], response: httpx.Response -) -> Optional[Union[Any, Handler]]: - if response.status_code == 200: - response_200 = Handler.from_dict(response.json()) - - return response_200 - - if response.status_code == 400: - response_400 = cast(Any, None) - return response_400 - - if response.status_code == 404: - response_404 = cast(Any, None) - return response_404 - - if response.status_code == 500: - response_500 = cast(Any, None) - return response_500 - - if client.raise_on_unexpected_status: - raise errors.UnexpectedStatus(response.status_code, response.content) - else: - return None - - -def _build_response( - *, client: Union[AuthenticatedClient, Client], response: httpx.Response -) -> Response[Union[Any, Handler]]: - return Response( - status_code=HTTPStatus(response.status_code), - content=response.content, - headers=response.headers, - parsed=_parse_response(client=client, response=response), - ) - - -def sync_detailed( - name: str, - *, - client: Union[AuthenticatedClient, Client], - body: PostWorkflowsNameRunBody, -) -> Response[Union[Any, Handler]]: - """Run workflow (wait) - - Runs the specified workflow synchronously and returns 
the final result. - The request body may include an optional serialized start event, an optional - context object, and optional keyword arguments passed to the workflow run. - - Args: - name (str): - body (PostWorkflowsNameRunBody): - - Raises: - errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. - httpx.TimeoutException: If the request takes longer than Client.timeout. - - Returns: - Response[Union[Any, Handler]] - """ - - kwargs = _get_kwargs( - name=name, - body=body, - ) - - response = client.get_httpx_client().request( - **kwargs, - ) - - return _build_response(client=client, response=response) - - -def sync( - name: str, - *, - client: Union[AuthenticatedClient, Client], - body: PostWorkflowsNameRunBody, -) -> Optional[Union[Any, Handler]]: - """Run workflow (wait) - - Runs the specified workflow synchronously and returns the final result. - The request body may include an optional serialized start event, an optional - context object, and optional keyword arguments passed to the workflow run. - - Args: - name (str): - body (PostWorkflowsNameRunBody): - - Raises: - errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. - httpx.TimeoutException: If the request takes longer than Client.timeout. - - Returns: - Union[Any, Handler] - """ - - return sync_detailed( - name=name, - client=client, - body=body, - ).parsed - - -async def asyncio_detailed( - name: str, - *, - client: Union[AuthenticatedClient, Client], - body: PostWorkflowsNameRunBody, -) -> Response[Union[Any, Handler]]: - """Run workflow (wait) - - Runs the specified workflow synchronously and returns the final result. - The request body may include an optional serialized start event, an optional - context object, and optional keyword arguments passed to the workflow run. - - Args: - name (str): - body (PostWorkflowsNameRunBody): - - Raises: - errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. - httpx.TimeoutException: If the request takes longer than Client.timeout. - - Returns: - Response[Union[Any, Handler]] - """ - - kwargs = _get_kwargs( - name=name, - body=body, - ) - - response = await client.get_async_httpx_client().request(**kwargs) - - return _build_response(client=client, response=response) - - -async def asyncio( - name: str, - *, - client: Union[AuthenticatedClient, Client], - body: PostWorkflowsNameRunBody, -) -> Optional[Union[Any, Handler]]: - """Run workflow (wait) - - Runs the specified workflow synchronously and returns the final result. - The request body may include an optional serialized start event, an optional - context object, and optional keyword arguments passed to the workflow run. - - Args: - name (str): - body (PostWorkflowsNameRunBody): - - Raises: - errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. - httpx.TimeoutException: If the request takes longer than Client.timeout. 
- - Returns: - Union[Any, Handler] - """ - - return ( - await asyncio_detailed( - name=name, - client=client, - body=body, - ) - ).parsed diff --git a/src/workflows/openapi_generated_client/workflows_api_client/api/default/post_workflows_name_run_nowait.py b/src/workflows/openapi_generated_client/workflows_api_client/api/default/post_workflows_name_run_nowait.py deleted file mode 100644 index 191ad29..0000000 --- a/src/workflows/openapi_generated_client/workflows_api_client/api/default/post_workflows_name_run_nowait.py +++ /dev/null @@ -1,193 +0,0 @@ -from http import HTTPStatus -from typing import Any, Optional, Union, cast - -import httpx - -from ... import errors -from ...client import AuthenticatedClient, Client -from ...models.handler import Handler -from ...models.post_workflows_name_run_nowait_body import PostWorkflowsNameRunNowaitBody -from ...types import Response - - -def _get_kwargs( - name: str, - *, - body: PostWorkflowsNameRunNowaitBody, -) -> dict[str, Any]: - headers: dict[str, Any] = {} - - _kwargs: dict[str, Any] = { - "method": "post", - "url": f"/workflows/{name}/run-nowait", - } - - _kwargs["json"] = body.to_dict() - - headers["Content-Type"] = "application/json" - - _kwargs["headers"] = headers - return _kwargs - - -def _parse_response( - *, client: Union[AuthenticatedClient, Client], response: httpx.Response -) -> Optional[Union[Any, Handler]]: - if response.status_code == 200: - response_200 = Handler.from_dict(response.json()) - - return response_200 - - if response.status_code == 400: - response_400 = cast(Any, None) - return response_400 - - if response.status_code == 404: - response_404 = cast(Any, None) - return response_404 - - if client.raise_on_unexpected_status: - raise errors.UnexpectedStatus(response.status_code, response.content) - else: - return None - - -def _build_response( - *, client: Union[AuthenticatedClient, Client], response: httpx.Response -) -> Response[Union[Any, Handler]]: - return Response( - status_code=HTTPStatus(response.status_code), - content=response.content, - headers=response.headers, - parsed=_parse_response(client=client, response=response), - ) - - -def sync_detailed( - name: str, - *, - client: Union[AuthenticatedClient, Client], - body: PostWorkflowsNameRunNowaitBody, -) -> Response[Union[Any, Handler]]: - """Run workflow (no-wait) - - Starts the specified workflow asynchronously and returns a handler identifier - which can be used to query results or stream events. - - Args: - name (str): - body (PostWorkflowsNameRunNowaitBody): - - Raises: - errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. - httpx.TimeoutException: If the request takes longer than Client.timeout. - - Returns: - Response[Union[Any, Handler]] - """ - - kwargs = _get_kwargs( - name=name, - body=body, - ) - - response = client.get_httpx_client().request( - **kwargs, - ) - - return _build_response(client=client, response=response) - - -def sync( - name: str, - *, - client: Union[AuthenticatedClient, Client], - body: PostWorkflowsNameRunNowaitBody, -) -> Optional[Union[Any, Handler]]: - """Run workflow (no-wait) - - Starts the specified workflow asynchronously and returns a handler identifier - which can be used to query results or stream events. - - Args: - name (str): - body (PostWorkflowsNameRunNowaitBody): - - Raises: - errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. 
- httpx.TimeoutException: If the request takes longer than Client.timeout. - - Returns: - Union[Any, Handler] - """ - - return sync_detailed( - name=name, - client=client, - body=body, - ).parsed - - -async def asyncio_detailed( - name: str, - *, - client: Union[AuthenticatedClient, Client], - body: PostWorkflowsNameRunNowaitBody, -) -> Response[Union[Any, Handler]]: - """Run workflow (no-wait) - - Starts the specified workflow asynchronously and returns a handler identifier - which can be used to query results or stream events. - - Args: - name (str): - body (PostWorkflowsNameRunNowaitBody): - - Raises: - errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. - httpx.TimeoutException: If the request takes longer than Client.timeout. - - Returns: - Response[Union[Any, Handler]] - """ - - kwargs = _get_kwargs( - name=name, - body=body, - ) - - response = await client.get_async_httpx_client().request(**kwargs) - - return _build_response(client=client, response=response) - - -async def asyncio( - name: str, - *, - client: Union[AuthenticatedClient, Client], - body: PostWorkflowsNameRunNowaitBody, -) -> Optional[Union[Any, Handler]]: - """Run workflow (no-wait) - - Starts the specified workflow asynchronously and returns a handler identifier - which can be used to query results or stream events. - - Args: - name (str): - body (PostWorkflowsNameRunNowaitBody): - - Raises: - errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. - httpx.TimeoutException: If the request takes longer than Client.timeout. - - Returns: - Union[Any, Handler] - """ - - return ( - await asyncio_detailed( - name=name, - client=client, - body=body, - ) - ).parsed diff --git a/src/workflows/openapi_generated_client/workflows_api_client/client.py b/src/workflows/openapi_generated_client/workflows_api_client/client.py deleted file mode 100644 index e80446f..0000000 --- a/src/workflows/openapi_generated_client/workflows_api_client/client.py +++ /dev/null @@ -1,268 +0,0 @@ -import ssl -from typing import Any, Optional, Union - -import httpx -from attrs import define, evolve, field - - -@define -class Client: - """A class for keeping track of data related to the API - - The following are accepted as keyword arguments and will be used to construct httpx Clients internally: - - ``base_url``: The base URL for the API, all requests are made to a relative path to this URL - - ``cookies``: A dictionary of cookies to be sent with every request - - ``headers``: A dictionary of headers to be sent with every request - - ``timeout``: The maximum amount of a time a request can take. API functions will raise - httpx.TimeoutException if this is exceeded. - - ``verify_ssl``: Whether or not to verify the SSL certificate of the API server. This should be True in production, - but can be set to False for testing purposes. - - ``follow_redirects``: Whether or not to follow redirects. Default value is False. - - ``httpx_args``: A dictionary of additional arguments to be passed to the ``httpx.Client`` and ``httpx.AsyncClient`` constructor. - - - Attributes: - raise_on_unexpected_status: Whether or not to raise an errors.UnexpectedStatus if the API returns a - status code that was not documented in the source OpenAPI document. Can also be provided as a keyword - argument to the constructor. 
- """ - - raise_on_unexpected_status: bool = field(default=False, kw_only=True) - _base_url: str = field(alias="base_url") - _cookies: dict[str, str] = field(factory=dict, kw_only=True, alias="cookies") - _headers: dict[str, str] = field(factory=dict, kw_only=True, alias="headers") - _timeout: Optional[httpx.Timeout] = field(default=None, kw_only=True, alias="timeout") - _verify_ssl: Union[str, bool, ssl.SSLContext] = field(default=True, kw_only=True, alias="verify_ssl") - _follow_redirects: bool = field(default=False, kw_only=True, alias="follow_redirects") - _httpx_args: dict[str, Any] = field(factory=dict, kw_only=True, alias="httpx_args") - _client: Optional[httpx.Client] = field(default=None, init=False) - _async_client: Optional[httpx.AsyncClient] = field(default=None, init=False) - - def with_headers(self, headers: dict[str, str]) -> "Client": - """Get a new client matching this one with additional headers""" - if self._client is not None: - self._client.headers.update(headers) - if self._async_client is not None: - self._async_client.headers.update(headers) - return evolve(self, headers={**self._headers, **headers}) - - def with_cookies(self, cookies: dict[str, str]) -> "Client": - """Get a new client matching this one with additional cookies""" - if self._client is not None: - self._client.cookies.update(cookies) - if self._async_client is not None: - self._async_client.cookies.update(cookies) - return evolve(self, cookies={**self._cookies, **cookies}) - - def with_timeout(self, timeout: httpx.Timeout) -> "Client": - """Get a new client matching this one with a new timeout (in seconds)""" - if self._client is not None: - self._client.timeout = timeout - if self._async_client is not None: - self._async_client.timeout = timeout - return evolve(self, timeout=timeout) - - def set_httpx_client(self, client: httpx.Client) -> "Client": - """Manually set the underlying httpx.Client - - **NOTE**: This will override any other settings on the client, including cookies, headers, and timeout. - """ - self._client = client - return self - - def get_httpx_client(self) -> httpx.Client: - """Get the underlying httpx.Client, constructing a new one if not previously set""" - if self._client is None: - self._client = httpx.Client( - base_url=self._base_url, - cookies=self._cookies, - headers=self._headers, - timeout=self._timeout, - verify=self._verify_ssl, - follow_redirects=self._follow_redirects, - **self._httpx_args, - ) - return self._client - - def __enter__(self) -> "Client": - """Enter a context manager for self.client—you cannot enter twice (see httpx docs)""" - self.get_httpx_client().__enter__() - return self - - def __exit__(self, *args: Any, **kwargs: Any) -> None: - """Exit a context manager for internal httpx.Client (see httpx docs)""" - self.get_httpx_client().__exit__(*args, **kwargs) - - def set_async_httpx_client(self, async_client: httpx.AsyncClient) -> "Client": - """Manually the underlying httpx.AsyncClient - - **NOTE**: This will override any other settings on the client, including cookies, headers, and timeout. 
- """ - self._async_client = async_client - return self - - def get_async_httpx_client(self) -> httpx.AsyncClient: - """Get the underlying httpx.AsyncClient, constructing a new one if not previously set""" - if self._async_client is None: - self._async_client = httpx.AsyncClient( - base_url=self._base_url, - cookies=self._cookies, - headers=self._headers, - timeout=self._timeout, - verify=self._verify_ssl, - follow_redirects=self._follow_redirects, - **self._httpx_args, - ) - return self._async_client - - async def __aenter__(self) -> "Client": - """Enter a context manager for underlying httpx.AsyncClient—you cannot enter twice (see httpx docs)""" - await self.get_async_httpx_client().__aenter__() - return self - - async def __aexit__(self, *args: Any, **kwargs: Any) -> None: - """Exit a context manager for underlying httpx.AsyncClient (see httpx docs)""" - await self.get_async_httpx_client().__aexit__(*args, **kwargs) - - -@define -class AuthenticatedClient: - """A Client which has been authenticated for use on secured endpoints - - The following are accepted as keyword arguments and will be used to construct httpx Clients internally: - - ``base_url``: The base URL for the API, all requests are made to a relative path to this URL - - ``cookies``: A dictionary of cookies to be sent with every request - - ``headers``: A dictionary of headers to be sent with every request - - ``timeout``: The maximum amount of a time a request can take. API functions will raise - httpx.TimeoutException if this is exceeded. - - ``verify_ssl``: Whether or not to verify the SSL certificate of the API server. This should be True in production, - but can be set to False for testing purposes. - - ``follow_redirects``: Whether or not to follow redirects. Default value is False. - - ``httpx_args``: A dictionary of additional arguments to be passed to the ``httpx.Client`` and ``httpx.AsyncClient`` constructor. - - - Attributes: - raise_on_unexpected_status: Whether or not to raise an errors.UnexpectedStatus if the API returns a - status code that was not documented in the source OpenAPI document. Can also be provided as a keyword - argument to the constructor. 
- token: The token to use for authentication - prefix: The prefix to use for the Authorization header - auth_header_name: The name of the Authorization header - """ - - raise_on_unexpected_status: bool = field(default=False, kw_only=True) - _base_url: str = field(alias="base_url") - _cookies: dict[str, str] = field(factory=dict, kw_only=True, alias="cookies") - _headers: dict[str, str] = field(factory=dict, kw_only=True, alias="headers") - _timeout: Optional[httpx.Timeout] = field(default=None, kw_only=True, alias="timeout") - _verify_ssl: Union[str, bool, ssl.SSLContext] = field(default=True, kw_only=True, alias="verify_ssl") - _follow_redirects: bool = field(default=False, kw_only=True, alias="follow_redirects") - _httpx_args: dict[str, Any] = field(factory=dict, kw_only=True, alias="httpx_args") - _client: Optional[httpx.Client] = field(default=None, init=False) - _async_client: Optional[httpx.AsyncClient] = field(default=None, init=False) - - token: str - prefix: str = "Bearer" - auth_header_name: str = "Authorization" - - def with_headers(self, headers: dict[str, str]) -> "AuthenticatedClient": - """Get a new client matching this one with additional headers""" - if self._client is not None: - self._client.headers.update(headers) - if self._async_client is not None: - self._async_client.headers.update(headers) - return evolve(self, headers={**self._headers, **headers}) - - def with_cookies(self, cookies: dict[str, str]) -> "AuthenticatedClient": - """Get a new client matching this one with additional cookies""" - if self._client is not None: - self._client.cookies.update(cookies) - if self._async_client is not None: - self._async_client.cookies.update(cookies) - return evolve(self, cookies={**self._cookies, **cookies}) - - def with_timeout(self, timeout: httpx.Timeout) -> "AuthenticatedClient": - """Get a new client matching this one with a new timeout (in seconds)""" - if self._client is not None: - self._client.timeout = timeout - if self._async_client is not None: - self._async_client.timeout = timeout - return evolve(self, timeout=timeout) - - def set_httpx_client(self, client: httpx.Client) -> "AuthenticatedClient": - """Manually set the underlying httpx.Client - - **NOTE**: This will override any other settings on the client, including cookies, headers, and timeout. - """ - self._client = client - return self - - def get_httpx_client(self) -> httpx.Client: - """Get the underlying httpx.Client, constructing a new one if not previously set""" - if self._client is None: - self._headers[self.auth_header_name] = f"{self.prefix} {self.token}" if self.prefix else self.token - self._client = httpx.Client( - base_url=self._base_url, - cookies=self._cookies, - headers=self._headers, - timeout=self._timeout, - verify=self._verify_ssl, - follow_redirects=self._follow_redirects, - **self._httpx_args, - ) - return self._client - - def __enter__(self) -> "AuthenticatedClient": - """Enter a context manager for self.client—you cannot enter twice (see httpx docs)""" - self.get_httpx_client().__enter__() - return self - - def __exit__(self, *args: Any, **kwargs: Any) -> None: - """Exit a context manager for internal httpx.Client (see httpx docs)""" - self.get_httpx_client().__exit__(*args, **kwargs) - - def set_async_httpx_client(self, async_client: httpx.AsyncClient) -> "AuthenticatedClient": - """Manually the underlying httpx.AsyncClient - - **NOTE**: This will override any other settings on the client, including cookies, headers, and timeout. 
- """ - self._async_client = async_client - return self - - def get_async_httpx_client(self) -> httpx.AsyncClient: - """Get the underlying httpx.AsyncClient, constructing a new one if not previously set""" - if self._async_client is None: - self._headers[self.auth_header_name] = f"{self.prefix} {self.token}" if self.prefix else self.token - self._async_client = httpx.AsyncClient( - base_url=self._base_url, - cookies=self._cookies, - headers=self._headers, - timeout=self._timeout, - verify=self._verify_ssl, - follow_redirects=self._follow_redirects, - **self._httpx_args, - ) - return self._async_client - - async def __aenter__(self) -> "AuthenticatedClient": - """Enter a context manager for underlying httpx.AsyncClient—you cannot enter twice (see httpx docs)""" - await self.get_async_httpx_client().__aenter__() - return self - - async def __aexit__(self, *args: Any, **kwargs: Any) -> None: - """Exit a context manager for underlying httpx.AsyncClient (see httpx docs)""" - await self.get_async_httpx_client().__aexit__(*args, **kwargs) diff --git a/src/workflows/openapi_generated_client/workflows_api_client/errors.py b/src/workflows/openapi_generated_client/workflows_api_client/errors.py deleted file mode 100644 index 5f92e76..0000000 --- a/src/workflows/openapi_generated_client/workflows_api_client/errors.py +++ /dev/null @@ -1,16 +0,0 @@ -"""Contains shared errors types that can be raised from API functions""" - - -class UnexpectedStatus(Exception): - """Raised by api functions when the response status an undocumented status and Client.raise_on_unexpected_status is True""" - - def __init__(self, status_code: int, content: bytes): - self.status_code = status_code - self.content = content - - super().__init__( - f"Unexpected status code: {status_code}\n\nResponse content:\n{content.decode(errors='ignore')}" - ) - - -__all__ = ["UnexpectedStatus"] diff --git a/src/workflows/openapi_generated_client/workflows_api_client/models/__init__.py b/src/workflows/openapi_generated_client/workflows_api_client/models/__init__.py deleted file mode 100644 index ce534b3..0000000 --- a/src/workflows/openapi_generated_client/workflows_api_client/models/__init__.py +++ /dev/null @@ -1,45 +0,0 @@ -"""Contains all the data models used in inputs/outputs""" - -from .get_events_handler_id_response_200 import GetEventsHandlerIdResponse200 -from .get_events_handler_id_response_200_value import GetEventsHandlerIdResponse200Value -from .get_health_response_200 import GetHealthResponse200 -from .get_workflows_name_representation_response_200 import GetWorkflowsNameRepresentationResponse200 -from .get_workflows_name_schema_response_200 import GetWorkflowsNameSchemaResponse200 -from .get_workflows_response_200 import GetWorkflowsResponse200 -from .handler import Handler -from .handler_status import HandlerStatus -from .handlers_list import HandlersList -from .post_events_handler_id_body import PostEventsHandlerIdBody -from .post_events_handler_id_response_200 import PostEventsHandlerIdResponse200 -from .post_events_handler_id_response_200_status import PostEventsHandlerIdResponse200Status -from .post_workflows_name_run_body import PostWorkflowsNameRunBody -from .post_workflows_name_run_body_context import PostWorkflowsNameRunBodyContext -from .post_workflows_name_run_body_kwargs import PostWorkflowsNameRunBodyKwargs -from .post_workflows_name_run_body_start_event import PostWorkflowsNameRunBodyStartEvent -from .post_workflows_name_run_nowait_body import PostWorkflowsNameRunNowaitBody -from 
.post_workflows_name_run_nowait_body_context import PostWorkflowsNameRunNowaitBodyContext -from .post_workflows_name_run_nowait_body_kwargs import PostWorkflowsNameRunNowaitBodyKwargs -from .post_workflows_name_run_nowait_body_start_event import PostWorkflowsNameRunNowaitBodyStartEvent - -__all__ = ( - "GetEventsHandlerIdResponse200", - "GetEventsHandlerIdResponse200Value", - "GetHealthResponse200", - "GetWorkflowsNameRepresentationResponse200", - "GetWorkflowsNameSchemaResponse200", - "GetWorkflowsResponse200", - "Handler", - "HandlersList", - "HandlerStatus", - "PostEventsHandlerIdBody", - "PostEventsHandlerIdResponse200", - "PostEventsHandlerIdResponse200Status", - "PostWorkflowsNameRunBody", - "PostWorkflowsNameRunBodyContext", - "PostWorkflowsNameRunBodyKwargs", - "PostWorkflowsNameRunBodyStartEvent", - "PostWorkflowsNameRunNowaitBody", - "PostWorkflowsNameRunNowaitBodyContext", - "PostWorkflowsNameRunNowaitBodyKwargs", - "PostWorkflowsNameRunNowaitBodyStartEvent", -) diff --git a/src/workflows/openapi_generated_client/workflows_api_client/models/get_events_handler_id_response_200.py b/src/workflows/openapi_generated_client/workflows_api_client/models/get_events_handler_id_response_200.py deleted file mode 100644 index 39d4775..0000000 --- a/src/workflows/openapi_generated_client/workflows_api_client/models/get_events_handler_id_response_200.py +++ /dev/null @@ -1,74 +0,0 @@ -from collections.abc import Mapping -from typing import TYPE_CHECKING, Any, TypeVar - -from attrs import define as _attrs_define -from attrs import field as _attrs_field - -if TYPE_CHECKING: - from ..models.get_events_handler_id_response_200_value import GetEventsHandlerIdResponse200Value - - -T = TypeVar("T", bound="GetEventsHandlerIdResponse200") - - -@_attrs_define -class GetEventsHandlerIdResponse200: - """Server-Sent Events stream of event data. - - Attributes: - value (GetEventsHandlerIdResponse200Value): The event value. - qualified_name (str): The qualified name of the event. 
- """ - - value: "GetEventsHandlerIdResponse200Value" - qualified_name: str - additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) - - def to_dict(self) -> dict[str, Any]: - value = self.value.to_dict() - - qualified_name = self.qualified_name - - field_dict: dict[str, Any] = {} - field_dict.update(self.additional_properties) - field_dict.update( - { - "value": value, - "qualified_name": qualified_name, - } - ) - - return field_dict - - @classmethod - def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T: - from ..models.get_events_handler_id_response_200_value import GetEventsHandlerIdResponse200Value - - d = dict(src_dict) - value = GetEventsHandlerIdResponse200Value.from_dict(d.pop("value")) - - qualified_name = d.pop("qualified_name") - - get_events_handler_id_response_200 = cls( - value=value, - qualified_name=qualified_name, - ) - - get_events_handler_id_response_200.additional_properties = d - return get_events_handler_id_response_200 - - @property - def additional_keys(self) -> list[str]: - return list(self.additional_properties.keys()) - - def __getitem__(self, key: str) -> Any: - return self.additional_properties[key] - - def __setitem__(self, key: str, value: Any) -> None: - self.additional_properties[key] = value - - def __delitem__(self, key: str) -> None: - del self.additional_properties[key] - - def __contains__(self, key: str) -> bool: - return key in self.additional_properties diff --git a/src/workflows/openapi_generated_client/workflows_api_client/models/get_events_handler_id_response_200_value.py b/src/workflows/openapi_generated_client/workflows_api_client/models/get_events_handler_id_response_200_value.py deleted file mode 100644 index 942d949..0000000 --- a/src/workflows/openapi_generated_client/workflows_api_client/models/get_events_handler_id_response_200_value.py +++ /dev/null @@ -1,44 +0,0 @@ -from collections.abc import Mapping -from typing import Any, TypeVar - -from attrs import define as _attrs_define -from attrs import field as _attrs_field - -T = TypeVar("T", bound="GetEventsHandlerIdResponse200Value") - - -@_attrs_define -class GetEventsHandlerIdResponse200Value: - """The event value.""" - - additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) - - def to_dict(self) -> dict[str, Any]: - field_dict: dict[str, Any] = {} - field_dict.update(self.additional_properties) - - return field_dict - - @classmethod - def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T: - d = dict(src_dict) - get_events_handler_id_response_200_value = cls() - - get_events_handler_id_response_200_value.additional_properties = d - return get_events_handler_id_response_200_value - - @property - def additional_keys(self) -> list[str]: - return list(self.additional_properties.keys()) - - def __getitem__(self, key: str) -> Any: - return self.additional_properties[key] - - def __setitem__(self, key: str, value: Any) -> None: - self.additional_properties[key] = value - - def __delitem__(self, key: str) -> None: - del self.additional_properties[key] - - def __contains__(self, key: str) -> bool: - return key in self.additional_properties diff --git a/src/workflows/openapi_generated_client/workflows_api_client/models/get_health_response_200.py b/src/workflows/openapi_generated_client/workflows_api_client/models/get_health_response_200.py deleted file mode 100644 index 3d5af72..0000000 --- a/src/workflows/openapi_generated_client/workflows_api_client/models/get_health_response_200.py +++ /dev/null @@ -1,59 +0,0 @@ -from 
collections.abc import Mapping -from typing import Any, TypeVar - -from attrs import define as _attrs_define -from attrs import field as _attrs_field - -T = TypeVar("T", bound="GetHealthResponse200") - - -@_attrs_define -class GetHealthResponse200: - """ - Attributes: - status (str): Example: healthy. - """ - - status: str - additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) - - def to_dict(self) -> dict[str, Any]: - status = self.status - - field_dict: dict[str, Any] = {} - field_dict.update(self.additional_properties) - field_dict.update( - { - "status": status, - } - ) - - return field_dict - - @classmethod - def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T: - d = dict(src_dict) - status = d.pop("status") - - get_health_response_200 = cls( - status=status, - ) - - get_health_response_200.additional_properties = d - return get_health_response_200 - - @property - def additional_keys(self) -> list[str]: - return list(self.additional_properties.keys()) - - def __getitem__(self, key: str) -> Any: - return self.additional_properties[key] - - def __setitem__(self, key: str, value: Any) -> None: - self.additional_properties[key] = value - - def __delitem__(self, key: str) -> None: - del self.additional_properties[key] - - def __contains__(self, key: str) -> bool: - return key in self.additional_properties diff --git a/src/workflows/openapi_generated_client/workflows_api_client/models/get_workflows_name_representation_response_200.py b/src/workflows/openapi_generated_client/workflows_api_client/models/get_workflows_name_representation_response_200.py deleted file mode 100644 index 1276c1f..0000000 --- a/src/workflows/openapi_generated_client/workflows_api_client/models/get_workflows_name_representation_response_200.py +++ /dev/null @@ -1,59 +0,0 @@ -from collections.abc import Mapping -from typing import Any, TypeVar - -from attrs import define as _attrs_define -from attrs import field as _attrs_field - -T = TypeVar("T", bound="GetWorkflowsNameRepresentationResponse200") - - -@_attrs_define -class GetWorkflowsNameRepresentationResponse200: - """ - Attributes: - graph (Any): the elements of the JSON representation of the workflow - """ - - graph: Any - additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) - - def to_dict(self) -> dict[str, Any]: - graph = self.graph - - field_dict: dict[str, Any] = {} - field_dict.update(self.additional_properties) - field_dict.update( - { - "graph": graph, - } - ) - - return field_dict - - @classmethod - def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T: - d = dict(src_dict) - graph = d.pop("graph") - - get_workflows_name_representation_response_200 = cls( - graph=graph, - ) - - get_workflows_name_representation_response_200.additional_properties = d - return get_workflows_name_representation_response_200 - - @property - def additional_keys(self) -> list[str]: - return list(self.additional_properties.keys()) - - def __getitem__(self, key: str) -> Any: - return self.additional_properties[key] - - def __setitem__(self, key: str, value: Any) -> None: - self.additional_properties[key] = value - - def __delitem__(self, key: str) -> None: - del self.additional_properties[key] - - def __contains__(self, key: str) -> bool: - return key in self.additional_properties diff --git a/src/workflows/openapi_generated_client/workflows_api_client/models/get_workflows_name_schema_response_200.py b/src/workflows/openapi_generated_client/workflows_api_client/models/get_workflows_name_schema_response_200.py 
deleted file mode 100644 index 13c7d2c..0000000 --- a/src/workflows/openapi_generated_client/workflows_api_client/models/get_workflows_name_schema_response_200.py +++ /dev/null @@ -1,67 +0,0 @@ -from collections.abc import Mapping -from typing import Any, TypeVar - -from attrs import define as _attrs_define -from attrs import field as _attrs_field - -T = TypeVar("T", bound="GetWorkflowsNameSchemaResponse200") - - -@_attrs_define -class GetWorkflowsNameSchemaResponse200: - """ - Attributes: - start (Any): JSON schema for the start event - stop (Any): JSON schema for the stop event - """ - - start: Any - stop: Any - additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) - - def to_dict(self) -> dict[str, Any]: - start = self.start - - stop = self.stop - - field_dict: dict[str, Any] = {} - field_dict.update(self.additional_properties) - field_dict.update( - { - "start": start, - "stop": stop, - } - ) - - return field_dict - - @classmethod - def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T: - d = dict(src_dict) - start = d.pop("start") - - stop = d.pop("stop") - - get_workflows_name_schema_response_200 = cls( - start=start, - stop=stop, - ) - - get_workflows_name_schema_response_200.additional_properties = d - return get_workflows_name_schema_response_200 - - @property - def additional_keys(self) -> list[str]: - return list(self.additional_properties.keys()) - - def __getitem__(self, key: str) -> Any: - return self.additional_properties[key] - - def __setitem__(self, key: str, value: Any) -> None: - self.additional_properties[key] = value - - def __delitem__(self, key: str) -> None: - del self.additional_properties[key] - - def __contains__(self, key: str) -> bool: - return key in self.additional_properties diff --git a/src/workflows/openapi_generated_client/workflows_api_client/models/get_workflows_response_200.py b/src/workflows/openapi_generated_client/workflows_api_client/models/get_workflows_response_200.py deleted file mode 100644 index 6062dfa..0000000 --- a/src/workflows/openapi_generated_client/workflows_api_client/models/get_workflows_response_200.py +++ /dev/null @@ -1,59 +0,0 @@ -from collections.abc import Mapping -from typing import Any, TypeVar, cast - -from attrs import define as _attrs_define -from attrs import field as _attrs_field - -T = TypeVar("T", bound="GetWorkflowsResponse200") - - -@_attrs_define -class GetWorkflowsResponse200: - """ - Attributes: - workflows (list[str]): - """ - - workflows: list[str] - additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) - - def to_dict(self) -> dict[str, Any]: - workflows = self.workflows - - field_dict: dict[str, Any] = {} - field_dict.update(self.additional_properties) - field_dict.update( - { - "workflows": workflows, - } - ) - - return field_dict - - @classmethod - def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T: - d = dict(src_dict) - workflows = cast(list[str], d.pop("workflows")) - - get_workflows_response_200 = cls( - workflows=workflows, - ) - - get_workflows_response_200.additional_properties = d - return get_workflows_response_200 - - @property - def additional_keys(self) -> list[str]: - return list(self.additional_properties.keys()) - - def __getitem__(self, key: str) -> Any: - return self.additional_properties[key] - - def __setitem__(self, key: str, value: Any) -> None: - self.additional_properties[key] = value - - def __delitem__(self, key: str) -> None: - del self.additional_properties[key] - - def __contains__(self, key: str) -> bool: - 
return key in self.additional_properties diff --git a/src/workflows/openapi_generated_client/workflows_api_client/models/handler.py b/src/workflows/openapi_generated_client/workflows_api_client/models/handler.py deleted file mode 100644 index 499d0d1..0000000 --- a/src/workflows/openapi_generated_client/workflows_api_client/models/handler.py +++ /dev/null @@ -1,197 +0,0 @@ -import datetime -from collections.abc import Mapping -from typing import Any, TypeVar, Union, cast - -from attrs import define as _attrs_define -from attrs import field as _attrs_field -from dateutil.parser import isoparse - -from ..models.handler_status import HandlerStatus -from ..types import UNSET, Unset - -T = TypeVar("T", bound="Handler") - - -@_attrs_define -class Handler: - """ - Attributes: - handler_id (str): - workflow_name (str): - status (HandlerStatus): - started_at (datetime.datetime): - run_id (Union[None, Unset, str]): - updated_at (Union[None, Unset, datetime.datetime]): - completed_at (Union[None, Unset, datetime.datetime]): - error (Union[None, Unset, str]): - result (Union[Unset, Any]): Workflow result value - """ - - handler_id: str - workflow_name: str - status: HandlerStatus - started_at: datetime.datetime - run_id: Union[None, Unset, str] = UNSET - updated_at: Union[None, Unset, datetime.datetime] = UNSET - completed_at: Union[None, Unset, datetime.datetime] = UNSET - error: Union[None, Unset, str] = UNSET - result: Union[Unset, Any] = UNSET - additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) - - def to_dict(self) -> dict[str, Any]: - handler_id = self.handler_id - - workflow_name = self.workflow_name - - status = self.status.value - - started_at = self.started_at.isoformat() - - run_id: Union[None, Unset, str] - if isinstance(self.run_id, Unset): - run_id = UNSET - else: - run_id = self.run_id - - updated_at: Union[None, Unset, str] - if isinstance(self.updated_at, Unset): - updated_at = UNSET - elif isinstance(self.updated_at, datetime.datetime): - updated_at = self.updated_at.isoformat() - else: - updated_at = self.updated_at - - completed_at: Union[None, Unset, str] - if isinstance(self.completed_at, Unset): - completed_at = UNSET - elif isinstance(self.completed_at, datetime.datetime): - completed_at = self.completed_at.isoformat() - else: - completed_at = self.completed_at - - error: Union[None, Unset, str] - if isinstance(self.error, Unset): - error = UNSET - else: - error = self.error - - result = self.result - - field_dict: dict[str, Any] = {} - field_dict.update(self.additional_properties) - field_dict.update( - { - "handler_id": handler_id, - "workflow_name": workflow_name, - "status": status, - "started_at": started_at, - } - ) - if run_id is not UNSET: - field_dict["run_id"] = run_id - if updated_at is not UNSET: - field_dict["updated_at"] = updated_at - if completed_at is not UNSET: - field_dict["completed_at"] = completed_at - if error is not UNSET: - field_dict["error"] = error - if result is not UNSET: - field_dict["result"] = result - - return field_dict - - @classmethod - def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T: - d = dict(src_dict) - handler_id = d.pop("handler_id") - - workflow_name = d.pop("workflow_name") - - status = HandlerStatus(d.pop("status")) - - started_at = isoparse(d.pop("started_at")) - - def _parse_run_id(data: object) -> Union[None, Unset, str]: - if data is None: - return data - if isinstance(data, Unset): - return data - return cast(Union[None, Unset, str], data) - - run_id = _parse_run_id(d.pop("run_id", 
UNSET)) - - def _parse_updated_at(data: object) -> Union[None, Unset, datetime.datetime]: - if data is None: - return data - if isinstance(data, Unset): - return data - try: - if not isinstance(data, str): - raise TypeError() - updated_at_type_0 = isoparse(data) - - return updated_at_type_0 - except: # noqa: E722 - pass - return cast(Union[None, Unset, datetime.datetime], data) - - updated_at = _parse_updated_at(d.pop("updated_at", UNSET)) - - def _parse_completed_at(data: object) -> Union[None, Unset, datetime.datetime]: - if data is None: - return data - if isinstance(data, Unset): - return data - try: - if not isinstance(data, str): - raise TypeError() - completed_at_type_0 = isoparse(data) - - return completed_at_type_0 - except: # noqa: E722 - pass - return cast(Union[None, Unset, datetime.datetime], data) - - completed_at = _parse_completed_at(d.pop("completed_at", UNSET)) - - def _parse_error(data: object) -> Union[None, Unset, str]: - if data is None: - return data - if isinstance(data, Unset): - return data - return cast(Union[None, Unset, str], data) - - error = _parse_error(d.pop("error", UNSET)) - - result = d.pop("result", UNSET) - - handler = cls( - handler_id=handler_id, - workflow_name=workflow_name, - status=status, - started_at=started_at, - run_id=run_id, - updated_at=updated_at, - completed_at=completed_at, - error=error, - result=result, - ) - - handler.additional_properties = d - return handler - - @property - def additional_keys(self) -> list[str]: - return list(self.additional_properties.keys()) - - def __getitem__(self, key: str) -> Any: - return self.additional_properties[key] - - def __setitem__(self, key: str, value: Any) -> None: - self.additional_properties[key] = value - - def __delitem__(self, key: str) -> None: - del self.additional_properties[key] - - def __contains__(self, key: str) -> bool: - return key in self.additional_properties diff --git a/src/workflows/openapi_generated_client/workflows_api_client/models/handler_status.py b/src/workflows/openapi_generated_client/workflows_api_client/models/handler_status.py deleted file mode 100644 index 2473289..0000000 --- a/src/workflows/openapi_generated_client/workflows_api_client/models/handler_status.py +++ /dev/null @@ -1,10 +0,0 @@ -from enum import Enum - - -class HandlerStatus(str, Enum): - COMPLETED = "completed" - FAILED = "failed" - RUNNING = "running" - - def __str__(self) -> str: - return str(self.value) diff --git a/src/workflows/openapi_generated_client/workflows_api_client/models/handlers_list.py b/src/workflows/openapi_generated_client/workflows_api_client/models/handlers_list.py deleted file mode 100644 index ed0cc69..0000000 --- a/src/workflows/openapi_generated_client/workflows_api_client/models/handlers_list.py +++ /dev/null @@ -1,73 +0,0 @@ -from collections.abc import Mapping -from typing import TYPE_CHECKING, Any, TypeVar - -from attrs import define as _attrs_define -from attrs import field as _attrs_field - -if TYPE_CHECKING: - from ..models.handler import Handler - - -T = TypeVar("T", bound="HandlersList") - - -@_attrs_define -class HandlersList: - """ - Attributes: - handlers (list['Handler']): - """ - - handlers: list["Handler"] - additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) - - def to_dict(self) -> dict[str, Any]: - handlers = [] - for handlers_item_data in self.handlers: - handlers_item = handlers_item_data.to_dict() - handlers.append(handlers_item) - - field_dict: dict[str, Any] = {} - field_dict.update(self.additional_properties) - 
field_dict.update( - { - "handlers": handlers, - } - ) - - return field_dict - - @classmethod - def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T: - from ..models.handler import Handler - - d = dict(src_dict) - handlers = [] - _handlers = d.pop("handlers") - for handlers_item_data in _handlers: - handlers_item = Handler.from_dict(handlers_item_data) - - handlers.append(handlers_item) - - handlers_list = cls( - handlers=handlers, - ) - - handlers_list.additional_properties = d - return handlers_list - - @property - def additional_keys(self) -> list[str]: - return list(self.additional_properties.keys()) - - def __getitem__(self, key: str) -> Any: - return self.additional_properties[key] - - def __setitem__(self, key: str, value: Any) -> None: - self.additional_properties[key] = value - - def __delitem__(self, key: str) -> None: - del self.additional_properties[key] - - def __contains__(self, key: str) -> bool: - return key in self.additional_properties diff --git a/src/workflows/openapi_generated_client/workflows_api_client/models/post_events_handler_id_body.py b/src/workflows/openapi_generated_client/workflows_api_client/models/post_events_handler_id_body.py deleted file mode 100644 index 87c9e8c..0000000 --- a/src/workflows/openapi_generated_client/workflows_api_client/models/post_events_handler_id_body.py +++ /dev/null @@ -1,70 +0,0 @@ -from collections.abc import Mapping -from typing import Any, TypeVar, Union - -from attrs import define as _attrs_define -from attrs import field as _attrs_field - -from ..types import UNSET, Unset - -T = TypeVar("T", bound="PostEventsHandlerIdBody") - - -@_attrs_define -class PostEventsHandlerIdBody: - """ - Attributes: - event (str): Serialized event in JSON format. - step (Union[Unset, str]): Optional target step name. If not provided, event is sent to all steps. 
- """ - - event: str - step: Union[Unset, str] = UNSET - additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) - - def to_dict(self) -> dict[str, Any]: - event = self.event - - step = self.step - - field_dict: dict[str, Any] = {} - field_dict.update(self.additional_properties) - field_dict.update( - { - "event": event, - } - ) - if step is not UNSET: - field_dict["step"] = step - - return field_dict - - @classmethod - def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T: - d = dict(src_dict) - event = d.pop("event") - - step = d.pop("step", UNSET) - - post_events_handler_id_body = cls( - event=event, - step=step, - ) - - post_events_handler_id_body.additional_properties = d - return post_events_handler_id_body - - @property - def additional_keys(self) -> list[str]: - return list(self.additional_properties.keys()) - - def __getitem__(self, key: str) -> Any: - return self.additional_properties[key] - - def __setitem__(self, key: str, value: Any) -> None: - self.additional_properties[key] = value - - def __delitem__(self, key: str) -> None: - del self.additional_properties[key] - - def __contains__(self, key: str) -> bool: - return key in self.additional_properties diff --git a/src/workflows/openapi_generated_client/workflows_api_client/models/post_events_handler_id_response_200.py b/src/workflows/openapi_generated_client/workflows_api_client/models/post_events_handler_id_response_200.py deleted file mode 100644 index f8406b0..0000000 --- a/src/workflows/openapi_generated_client/workflows_api_client/models/post_events_handler_id_response_200.py +++ /dev/null @@ -1,61 +0,0 @@ -from collections.abc import Mapping -from typing import Any, TypeVar - -from attrs import define as _attrs_define -from attrs import field as _attrs_field - -from ..models.post_events_handler_id_response_200_status import PostEventsHandlerIdResponse200Status - -T = TypeVar("T", bound="PostEventsHandlerIdResponse200") - - -@_attrs_define -class PostEventsHandlerIdResponse200: - """ - Attributes: - status (PostEventsHandlerIdResponse200Status): - """ - - status: PostEventsHandlerIdResponse200Status - additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) - - def to_dict(self) -> dict[str, Any]: - status = self.status.value - - field_dict: dict[str, Any] = {} - field_dict.update(self.additional_properties) - field_dict.update( - { - "status": status, - } - ) - - return field_dict - - @classmethod - def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T: - d = dict(src_dict) - status = PostEventsHandlerIdResponse200Status(d.pop("status")) - - post_events_handler_id_response_200 = cls( - status=status, - ) - - post_events_handler_id_response_200.additional_properties = d - return post_events_handler_id_response_200 - - @property - def additional_keys(self) -> list[str]: - return list(self.additional_properties.keys()) - - def __getitem__(self, key: str) -> Any: - return self.additional_properties[key] - - def __setitem__(self, key: str, value: Any) -> None: - self.additional_properties[key] = value - - def __delitem__(self, key: str) -> None: - del self.additional_properties[key] - - def __contains__(self, key: str) -> bool: - return key in self.additional_properties diff --git a/src/workflows/openapi_generated_client/workflows_api_client/models/post_events_handler_id_response_200_status.py b/src/workflows/openapi_generated_client/workflows_api_client/models/post_events_handler_id_response_200_status.py deleted file mode 100644 index 75857f2..0000000 --- 
a/src/workflows/openapi_generated_client/workflows_api_client/models/post_events_handler_id_response_200_status.py +++ /dev/null @@ -1,8 +0,0 @@ -from enum import Enum - - -class PostEventsHandlerIdResponse200Status(str, Enum): - SENT = "sent" - - def __str__(self) -> str: - return str(self.value) diff --git a/src/workflows/openapi_generated_client/workflows_api_client/models/post_workflows_name_run_body.py b/src/workflows/openapi_generated_client/workflows_api_client/models/post_workflows_name_run_body.py deleted file mode 100644 index 92fab2c..0000000 --- a/src/workflows/openapi_generated_client/workflows_api_client/models/post_workflows_name_run_body.py +++ /dev/null @@ -1,109 +0,0 @@ -from collections.abc import Mapping -from typing import TYPE_CHECKING, Any, TypeVar, Union - -from attrs import define as _attrs_define -from attrs import field as _attrs_field - -from ..types import UNSET, Unset - -if TYPE_CHECKING: - from ..models.post_workflows_name_run_body_context import PostWorkflowsNameRunBodyContext - from ..models.post_workflows_name_run_body_kwargs import PostWorkflowsNameRunBodyKwargs - from ..models.post_workflows_name_run_body_start_event import PostWorkflowsNameRunBodyStartEvent - - -T = TypeVar("T", bound="PostWorkflowsNameRunBody") - - -@_attrs_define -class PostWorkflowsNameRunBody: - """ - Attributes: - start_event (Union[Unset, PostWorkflowsNameRunBodyStartEvent]): Plain JSON object representing the start event - (e.g., {"message": "..."}). - context (Union[Unset, PostWorkflowsNameRunBodyContext]): Serialized workflow Context. - kwargs (Union[Unset, PostWorkflowsNameRunBodyKwargs]): Additional keyword arguments for the workflow. - """ - - start_event: Union[Unset, "PostWorkflowsNameRunBodyStartEvent"] = UNSET - context: Union[Unset, "PostWorkflowsNameRunBodyContext"] = UNSET - kwargs: Union[Unset, "PostWorkflowsNameRunBodyKwargs"] = UNSET - additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) - - def to_dict(self) -> dict[str, Any]: - start_event: Union[Unset, dict[str, Any]] = UNSET - if not isinstance(self.start_event, Unset): - start_event = self.start_event.to_dict() - - context: Union[Unset, dict[str, Any]] = UNSET - if not isinstance(self.context, Unset): - context = self.context.to_dict() - - kwargs: Union[Unset, dict[str, Any]] = UNSET - if not isinstance(self.kwargs, Unset): - kwargs = self.kwargs.to_dict() - - field_dict: dict[str, Any] = {} - field_dict.update(self.additional_properties) - field_dict.update({}) - if start_event is not UNSET: - field_dict["start_event"] = start_event - if context is not UNSET: - field_dict["context"] = context - if kwargs is not UNSET: - field_dict["kwargs"] = kwargs - - return field_dict - - @classmethod - def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T: - from ..models.post_workflows_name_run_body_context import PostWorkflowsNameRunBodyContext - from ..models.post_workflows_name_run_body_kwargs import PostWorkflowsNameRunBodyKwargs - from ..models.post_workflows_name_run_body_start_event import PostWorkflowsNameRunBodyStartEvent - - d = dict(src_dict) - _start_event = d.pop("start_event", UNSET) - start_event: Union[Unset, PostWorkflowsNameRunBodyStartEvent] - if isinstance(_start_event, Unset): - start_event = UNSET - else: - start_event = PostWorkflowsNameRunBodyStartEvent.from_dict(_start_event) - - _context = d.pop("context", UNSET) - context: Union[Unset, PostWorkflowsNameRunBodyContext] - if isinstance(_context, Unset): - context = UNSET - else: - context = 
PostWorkflowsNameRunBodyContext.from_dict(_context) - - _kwargs = d.pop("kwargs", UNSET) - kwargs: Union[Unset, PostWorkflowsNameRunBodyKwargs] - if isinstance(_kwargs, Unset): - kwargs = UNSET - else: - kwargs = PostWorkflowsNameRunBodyKwargs.from_dict(_kwargs) - - post_workflows_name_run_body = cls( - start_event=start_event, - context=context, - kwargs=kwargs, - ) - - post_workflows_name_run_body.additional_properties = d - return post_workflows_name_run_body - - @property - def additional_keys(self) -> list[str]: - return list(self.additional_properties.keys()) - - def __getitem__(self, key: str) -> Any: - return self.additional_properties[key] - - def __setitem__(self, key: str, value: Any) -> None: - self.additional_properties[key] = value - - def __delitem__(self, key: str) -> None: - del self.additional_properties[key] - - def __contains__(self, key: str) -> bool: - return key in self.additional_properties diff --git a/src/workflows/openapi_generated_client/workflows_api_client/models/post_workflows_name_run_body_context.py b/src/workflows/openapi_generated_client/workflows_api_client/models/post_workflows_name_run_body_context.py deleted file mode 100644 index 36380d5..0000000 --- a/src/workflows/openapi_generated_client/workflows_api_client/models/post_workflows_name_run_body_context.py +++ /dev/null @@ -1,44 +0,0 @@ -from collections.abc import Mapping -from typing import Any, TypeVar - -from attrs import define as _attrs_define -from attrs import field as _attrs_field - -T = TypeVar("T", bound="PostWorkflowsNameRunBodyContext") - - -@_attrs_define -class PostWorkflowsNameRunBodyContext: - """Serialized workflow Context.""" - - additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) - - def to_dict(self) -> dict[str, Any]: - field_dict: dict[str, Any] = {} - field_dict.update(self.additional_properties) - - return field_dict - - @classmethod - def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T: - d = dict(src_dict) - post_workflows_name_run_body_context = cls() - - post_workflows_name_run_body_context.additional_properties = d - return post_workflows_name_run_body_context - - @property - def additional_keys(self) -> list[str]: - return list(self.additional_properties.keys()) - - def __getitem__(self, key: str) -> Any: - return self.additional_properties[key] - - def __setitem__(self, key: str, value: Any) -> None: - self.additional_properties[key] = value - - def __delitem__(self, key: str) -> None: - del self.additional_properties[key] - - def __contains__(self, key: str) -> bool: - return key in self.additional_properties diff --git a/src/workflows/openapi_generated_client/workflows_api_client/models/post_workflows_name_run_body_kwargs.py b/src/workflows/openapi_generated_client/workflows_api_client/models/post_workflows_name_run_body_kwargs.py deleted file mode 100644 index f39b723..0000000 --- a/src/workflows/openapi_generated_client/workflows_api_client/models/post_workflows_name_run_body_kwargs.py +++ /dev/null @@ -1,44 +0,0 @@ -from collections.abc import Mapping -from typing import Any, TypeVar - -from attrs import define as _attrs_define -from attrs import field as _attrs_field - -T = TypeVar("T", bound="PostWorkflowsNameRunBodyKwargs") - - -@_attrs_define -class PostWorkflowsNameRunBodyKwargs: - """Additional keyword arguments for the workflow.""" - - additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) - - def to_dict(self) -> dict[str, Any]: - field_dict: dict[str, Any] = {} - 
field_dict.update(self.additional_properties) - - return field_dict - - @classmethod - def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T: - d = dict(src_dict) - post_workflows_name_run_body_kwargs = cls() - - post_workflows_name_run_body_kwargs.additional_properties = d - return post_workflows_name_run_body_kwargs - - @property - def additional_keys(self) -> list[str]: - return list(self.additional_properties.keys()) - - def __getitem__(self, key: str) -> Any: - return self.additional_properties[key] - - def __setitem__(self, key: str, value: Any) -> None: - self.additional_properties[key] = value - - def __delitem__(self, key: str) -> None: - del self.additional_properties[key] - - def __contains__(self, key: str) -> bool: - return key in self.additional_properties diff --git a/src/workflows/openapi_generated_client/workflows_api_client/models/post_workflows_name_run_body_start_event.py b/src/workflows/openapi_generated_client/workflows_api_client/models/post_workflows_name_run_body_start_event.py deleted file mode 100644 index 4ccd9ca..0000000 --- a/src/workflows/openapi_generated_client/workflows_api_client/models/post_workflows_name_run_body_start_event.py +++ /dev/null @@ -1,44 +0,0 @@ -from collections.abc import Mapping -from typing import Any, TypeVar - -from attrs import define as _attrs_define -from attrs import field as _attrs_field - -T = TypeVar("T", bound="PostWorkflowsNameRunBodyStartEvent") - - -@_attrs_define -class PostWorkflowsNameRunBodyStartEvent: - """Plain JSON object representing the start event (e.g., {"message": "..."}).""" - - additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) - - def to_dict(self) -> dict[str, Any]: - field_dict: dict[str, Any] = {} - field_dict.update(self.additional_properties) - - return field_dict - - @classmethod - def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T: - d = dict(src_dict) - post_workflows_name_run_body_start_event = cls() - - post_workflows_name_run_body_start_event.additional_properties = d - return post_workflows_name_run_body_start_event - - @property - def additional_keys(self) -> list[str]: - return list(self.additional_properties.keys()) - - def __getitem__(self, key: str) -> Any: - return self.additional_properties[key] - - def __setitem__(self, key: str, value: Any) -> None: - self.additional_properties[key] = value - - def __delitem__(self, key: str) -> None: - del self.additional_properties[key] - - def __contains__(self, key: str) -> bool: - return key in self.additional_properties diff --git a/src/workflows/openapi_generated_client/workflows_api_client/models/post_workflows_name_run_nowait_body.py b/src/workflows/openapi_generated_client/workflows_api_client/models/post_workflows_name_run_nowait_body.py deleted file mode 100644 index 8d06954..0000000 --- a/src/workflows/openapi_generated_client/workflows_api_client/models/post_workflows_name_run_nowait_body.py +++ /dev/null @@ -1,109 +0,0 @@ -from collections.abc import Mapping -from typing import TYPE_CHECKING, Any, TypeVar, Union - -from attrs import define as _attrs_define -from attrs import field as _attrs_field - -from ..types import UNSET, Unset - -if TYPE_CHECKING: - from ..models.post_workflows_name_run_nowait_body_context import PostWorkflowsNameRunNowaitBodyContext - from ..models.post_workflows_name_run_nowait_body_kwargs import PostWorkflowsNameRunNowaitBodyKwargs - from ..models.post_workflows_name_run_nowait_body_start_event import PostWorkflowsNameRunNowaitBodyStartEvent - - -T = TypeVar("T", 
bound="PostWorkflowsNameRunNowaitBody") - - -@_attrs_define -class PostWorkflowsNameRunNowaitBody: - """ - Attributes: - start_event (Union[Unset, PostWorkflowsNameRunNowaitBodyStartEvent]): Plain JSON object representing the start - event (e.g., {"message": "..."}). - context (Union[Unset, PostWorkflowsNameRunNowaitBodyContext]): Serialized workflow Context. - kwargs (Union[Unset, PostWorkflowsNameRunNowaitBodyKwargs]): Additional keyword arguments for the workflow. - """ - - start_event: Union[Unset, "PostWorkflowsNameRunNowaitBodyStartEvent"] = UNSET - context: Union[Unset, "PostWorkflowsNameRunNowaitBodyContext"] = UNSET - kwargs: Union[Unset, "PostWorkflowsNameRunNowaitBodyKwargs"] = UNSET - additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) - - def to_dict(self) -> dict[str, Any]: - start_event: Union[Unset, dict[str, Any]] = UNSET - if not isinstance(self.start_event, Unset): - start_event = self.start_event.to_dict() - - context: Union[Unset, dict[str, Any]] = UNSET - if not isinstance(self.context, Unset): - context = self.context.to_dict() - - kwargs: Union[Unset, dict[str, Any]] = UNSET - if not isinstance(self.kwargs, Unset): - kwargs = self.kwargs.to_dict() - - field_dict: dict[str, Any] = {} - field_dict.update(self.additional_properties) - field_dict.update({}) - if start_event is not UNSET: - field_dict["start_event"] = start_event - if context is not UNSET: - field_dict["context"] = context - if kwargs is not UNSET: - field_dict["kwargs"] = kwargs - - return field_dict - - @classmethod - def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T: - from ..models.post_workflows_name_run_nowait_body_context import PostWorkflowsNameRunNowaitBodyContext - from ..models.post_workflows_name_run_nowait_body_kwargs import PostWorkflowsNameRunNowaitBodyKwargs - from ..models.post_workflows_name_run_nowait_body_start_event import PostWorkflowsNameRunNowaitBodyStartEvent - - d = dict(src_dict) - _start_event = d.pop("start_event", UNSET) - start_event: Union[Unset, PostWorkflowsNameRunNowaitBodyStartEvent] - if isinstance(_start_event, Unset): - start_event = UNSET - else: - start_event = PostWorkflowsNameRunNowaitBodyStartEvent.from_dict(_start_event) - - _context = d.pop("context", UNSET) - context: Union[Unset, PostWorkflowsNameRunNowaitBodyContext] - if isinstance(_context, Unset): - context = UNSET - else: - context = PostWorkflowsNameRunNowaitBodyContext.from_dict(_context) - - _kwargs = d.pop("kwargs", UNSET) - kwargs: Union[Unset, PostWorkflowsNameRunNowaitBodyKwargs] - if isinstance(_kwargs, Unset): - kwargs = UNSET - else: - kwargs = PostWorkflowsNameRunNowaitBodyKwargs.from_dict(_kwargs) - - post_workflows_name_run_nowait_body = cls( - start_event=start_event, - context=context, - kwargs=kwargs, - ) - - post_workflows_name_run_nowait_body.additional_properties = d - return post_workflows_name_run_nowait_body - - @property - def additional_keys(self) -> list[str]: - return list(self.additional_properties.keys()) - - def __getitem__(self, key: str) -> Any: - return self.additional_properties[key] - - def __setitem__(self, key: str, value: Any) -> None: - self.additional_properties[key] = value - - def __delitem__(self, key: str) -> None: - del self.additional_properties[key] - - def __contains__(self, key: str) -> bool: - return key in self.additional_properties diff --git a/src/workflows/openapi_generated_client/workflows_api_client/models/post_workflows_name_run_nowait_body_context.py 
b/src/workflows/openapi_generated_client/workflows_api_client/models/post_workflows_name_run_nowait_body_context.py deleted file mode 100644 index b483d67..0000000 --- a/src/workflows/openapi_generated_client/workflows_api_client/models/post_workflows_name_run_nowait_body_context.py +++ /dev/null @@ -1,44 +0,0 @@ -from collections.abc import Mapping -from typing import Any, TypeVar - -from attrs import define as _attrs_define -from attrs import field as _attrs_field - -T = TypeVar("T", bound="PostWorkflowsNameRunNowaitBodyContext") - - -@_attrs_define -class PostWorkflowsNameRunNowaitBodyContext: - """Serialized workflow Context.""" - - additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) - - def to_dict(self) -> dict[str, Any]: - field_dict: dict[str, Any] = {} - field_dict.update(self.additional_properties) - - return field_dict - - @classmethod - def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T: - d = dict(src_dict) - post_workflows_name_run_nowait_body_context = cls() - - post_workflows_name_run_nowait_body_context.additional_properties = d - return post_workflows_name_run_nowait_body_context - - @property - def additional_keys(self) -> list[str]: - return list(self.additional_properties.keys()) - - def __getitem__(self, key: str) -> Any: - return self.additional_properties[key] - - def __setitem__(self, key: str, value: Any) -> None: - self.additional_properties[key] = value - - def __delitem__(self, key: str) -> None: - del self.additional_properties[key] - - def __contains__(self, key: str) -> bool: - return key in self.additional_properties diff --git a/src/workflows/openapi_generated_client/workflows_api_client/models/post_workflows_name_run_nowait_body_kwargs.py b/src/workflows/openapi_generated_client/workflows_api_client/models/post_workflows_name_run_nowait_body_kwargs.py deleted file mode 100644 index 1754fd9..0000000 --- a/src/workflows/openapi_generated_client/workflows_api_client/models/post_workflows_name_run_nowait_body_kwargs.py +++ /dev/null @@ -1,44 +0,0 @@ -from collections.abc import Mapping -from typing import Any, TypeVar - -from attrs import define as _attrs_define -from attrs import field as _attrs_field - -T = TypeVar("T", bound="PostWorkflowsNameRunNowaitBodyKwargs") - - -@_attrs_define -class PostWorkflowsNameRunNowaitBodyKwargs: - """Additional keyword arguments for the workflow.""" - - additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) - - def to_dict(self) -> dict[str, Any]: - field_dict: dict[str, Any] = {} - field_dict.update(self.additional_properties) - - return field_dict - - @classmethod - def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T: - d = dict(src_dict) - post_workflows_name_run_nowait_body_kwargs = cls() - - post_workflows_name_run_nowait_body_kwargs.additional_properties = d - return post_workflows_name_run_nowait_body_kwargs - - @property - def additional_keys(self) -> list[str]: - return list(self.additional_properties.keys()) - - def __getitem__(self, key: str) -> Any: - return self.additional_properties[key] - - def __setitem__(self, key: str, value: Any) -> None: - self.additional_properties[key] = value - - def __delitem__(self, key: str) -> None: - del self.additional_properties[key] - - def __contains__(self, key: str) -> bool: - return key in self.additional_properties diff --git a/src/workflows/openapi_generated_client/workflows_api_client/models/post_workflows_name_run_nowait_body_start_event.py 
b/src/workflows/openapi_generated_client/workflows_api_client/models/post_workflows_name_run_nowait_body_start_event.py deleted file mode 100644 index e0ab722..0000000 --- a/src/workflows/openapi_generated_client/workflows_api_client/models/post_workflows_name_run_nowait_body_start_event.py +++ /dev/null @@ -1,44 +0,0 @@ -from collections.abc import Mapping -from typing import Any, TypeVar - -from attrs import define as _attrs_define -from attrs import field as _attrs_field - -T = TypeVar("T", bound="PostWorkflowsNameRunNowaitBodyStartEvent") - - -@_attrs_define -class PostWorkflowsNameRunNowaitBodyStartEvent: - """Plain JSON object representing the start event (e.g., {"message": "..."}).""" - - additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) - - def to_dict(self) -> dict[str, Any]: - field_dict: dict[str, Any] = {} - field_dict.update(self.additional_properties) - - return field_dict - - @classmethod - def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T: - d = dict(src_dict) - post_workflows_name_run_nowait_body_start_event = cls() - - post_workflows_name_run_nowait_body_start_event.additional_properties = d - return post_workflows_name_run_nowait_body_start_event - - @property - def additional_keys(self) -> list[str]: - return list(self.additional_properties.keys()) - - def __getitem__(self, key: str) -> Any: - return self.additional_properties[key] - - def __setitem__(self, key: str, value: Any) -> None: - self.additional_properties[key] = value - - def __delitem__(self, key: str) -> None: - del self.additional_properties[key] - - def __contains__(self, key: str) -> bool: - return key in self.additional_properties diff --git a/src/workflows/openapi_generated_client/workflows_api_client/py.typed b/src/workflows/openapi_generated_client/workflows_api_client/py.typed deleted file mode 100644 index 7632ecf..0000000 --- a/src/workflows/openapi_generated_client/workflows_api_client/py.typed +++ /dev/null @@ -1 +0,0 @@ -# Marker file for PEP 561 diff --git a/src/workflows/openapi_generated_client/workflows_api_client/types.py b/src/workflows/openapi_generated_client/workflows_api_client/types.py deleted file mode 100644 index 1b96ca4..0000000 --- a/src/workflows/openapi_generated_client/workflows_api_client/types.py +++ /dev/null @@ -1,54 +0,0 @@ -"""Contains some shared types for properties""" - -from collections.abc import Mapping, MutableMapping -from http import HTTPStatus -from typing import IO, BinaryIO, Generic, Literal, Optional, TypeVar, Union - -from attrs import define - - -class Unset: - def __bool__(self) -> Literal[False]: - return False - - -UNSET: Unset = Unset() - -# The types that `httpx.Client(files=)` can accept, copied from that library. 
-FileContent = Union[IO[bytes], bytes, str] -FileTypes = Union[ - # (filename, file (or bytes), content_type) - tuple[Optional[str], FileContent, Optional[str]], - # (filename, file (or bytes), content_type, headers) - tuple[Optional[str], FileContent, Optional[str], Mapping[str, str]], -] -RequestFiles = list[tuple[str, FileTypes]] - - -@define -class File: - """Contains information for file uploads""" - - payload: BinaryIO - file_name: Optional[str] = None - mime_type: Optional[str] = None - - def to_tuple(self) -> FileTypes: - """Return a tuple representation that httpx will accept for multipart/form-data""" - return self.file_name, self.payload, self.mime_type - - -T = TypeVar("T") - - -@define -class Response(Generic[T]): - """A response from an endpoint""" - - status_code: HTTPStatus - content: bytes - headers: MutableMapping[str, str] - parsed: Optional[T] - - -__all__ = ["UNSET", "File", "FileTypes", "RequestFiles", "Response", "Unset"] diff --git a/src/workflows/server/server.py b/src/workflows/server/server.py index 15f8e4d..997a7bc 100644 --- a/src/workflows/server/server.py +++ b/src/workflows/server/server.py @@ -404,6 +404,8 @@ async def _get_events_schema(self, request: Request) -> JSONResponse: schema: type: string description: Registered workflow name. + requestBody: + required: false responses: 200: description: JSON schema successfully retrieved for start event @@ -453,6 +455,8 @@ async def _get_workflow_representation(self, request: Request) -> JSONResponse: schema: type: string description: Registered workflow name. + requestBody: + required: false responses: 200: description: JSON representation successfully retrieved From 62f136d08ec97a35141a21041c45e31099d51324 Mon Sep 17 00:00:00 2001 From: "Clelia (Astra) Bertelli" Date: Sat, 27 Sep 2025 22:38:34 +0200 Subject: [PATCH 06/13] chore: implement suggestions from first review --- examples/client/client.py | 32 +++-------- examples/client/server.py | 4 +- src/workflows/client/client.py | 97 ++++------------------------------ src/workflows/server/server.py | 4 +- 4 files changed, 22 insertions(+), 115 deletions(-) diff --git a/examples/client/client.py b/examples/client/client.py index 64b93eb..7bdb5ec 100644 --- a/examples/client/client.py +++ b/examples/client/client.py @@ -1,8 +1,8 @@ import asyncio -from workflows.client.client import WorkflowClient +from workflows.client import WorkflowClient -from workflows.events import StartEvent, HumanResponseEvent +from workflows.events import StartEvent from pydantic import PrivateAttr, model_validator, Field from typing import Literal, Callable, Self @@ -21,10 +21,6 @@ def assign_function(self) -> Self: return self -class HumanApprovedResult(HumanResponseEvent): - approved: bool - - async def main() -> None: client = WorkflowClient(protocol="http", host="localhost", port=8000) workflows = await client.list_workflows() @@ -36,26 +32,18 @@ async def main() -> None: ping_time = await client.ping() print("==== PING TIME ====") print(ping_time, "ms") - handler_id = await client.run_workflow_nowait( + handler = await client.run_workflow_nowait( "add_or_subtract", start_event=InputNumbers(a=1, b=3, operation="sum"), context=None, ) + handler_id = handler["handler_id"] print("==== STARTING THE WORKFLOW ===") print(f"Workflow running with handler ID: {handler_id}") print("=== STREAMING EVENTS ===") - def handle_event(event_data: dict) -> None: - print(f"Received event: {event_data}") - - # Stream events in background - stream_task = asyncio.create_task( - client.stream_events( - 
handler_id=handler_id, - event_callback=handle_event, - sse=True, # Use Server-Sent Events - ) - ) + async for event in client.get_workflow_events(handler_id=handler_id): + print("Received data:", event) # Poll for result result = None @@ -69,15 +57,7 @@ def handle_event(event_data: dict) -> None: print(f"Error: {e}") await asyncio.sleep(1) - # Cancel streaming task - stream_task.cancel() - try: - await stream_task - except asyncio.CancelledError: - pass - print(f"Final result: {result}") - return result if __name__ == "__main__": diff --git a/examples/client/server.py b/examples/client/server.py index dbb578b..bd99694 100644 --- a/examples/client/server.py +++ b/examples/client/server.py @@ -1,5 +1,5 @@ from workflows import Workflow, step, Context -from workflows.events import StartEvent, StopEvent, InputRequiredEvent +from workflows.events import StartEvent, StopEvent, Event from pydantic import Field from workflows.server import WorkflowServer @@ -12,7 +12,7 @@ class InputNumbers(StartEvent): operation: Literal["sum", "subtraction"] = Field(default="sum") -class CalculationEvent(InputRequiredEvent): +class CalculationEvent(Event): result: int diff --git a/src/workflows/client/client.py b/src/workflows/client/client.py index 0861754..d06f91c 100644 --- a/src/workflows/client/client.py +++ b/src/workflows/client/client.py @@ -1,9 +1,8 @@ import httpx import time import json -import inspect -from typing import Literal, Any, Union, Callable, AsyncGenerator, AsyncIterator +from typing import Literal, Any, Union, AsyncGenerator, AsyncIterator from contextlib import asynccontextmanager from logging import getLogger from workflows.events import StartEvent, Event @@ -131,7 +130,7 @@ async def run_workflow_nowait( start_event: Union[StartEvent, dict[str, Any], None] = None, context: Union[Context, dict[str, Any], None] = None, **kwargs: Any, - ) -> str: + ) -> dict[str, Any]: """ Run the workflow in the background. @@ -141,7 +140,7 @@ async def run_workflow_nowait( **kwargs: Any number of keyword arguments that would be passed on as additional keyword arguments to the workflow. Returns: - str: ID of the handler running the workflow + dict[str, Any]: JSON representation of the handler running the workflow """ if isinstance(start_event, StartEvent): try: @@ -167,59 +166,20 @@ async def run_workflow_nowait( response.raise_for_status() - return response.json()["handler_id"] + return response.json() - async def _stream_events_sse( + async def get_workflow_events( self, handler_id: str, ) -> AsyncGenerator[dict[str, Any], None]: """ - Stream events using Server-Sent Events format - """ - url = f"/events/{handler_id}?sse=true" - - async with self._get_client() as client: - try: - async with client.stream( - "GET", - url, - ) as response: - # Handle different response codes - if response.status_code == 404: - raise ValueError("Handler not found") - elif response.status_code == 204: - # Handler completed, no more events - return # type: ignore + Stream events as they are produced by the workflow. 
- response.raise_for_status() - - async for line in response.aiter_lines(): - if line.startswith("data: "): - # Extract JSON from SSE data line - json_data = line[6:] # Remove 'data: ' prefix - if json_data.strip(): # Skip empty data lines - try: - event = json.loads(json_data.replace("\n", "")) - yield event.get("value", {}) - except json.JSONDecodeError as e: - print( - f"Failed to parse JSON: {e}, data: {json_data}" - ) - continue - - except httpx.TimeoutException: - raise TimeoutError( - f"Timeout waiting for events from handler {handler_id}" - ) - except httpx.RequestError as e: - raise ConnectionError(f"Failed to connect to event stream: {e}") + Args: + handler_id (str): ID of the handler running the workflow - async def _stream_events_ndjson( - self, - handler_id: str, - ) -> AsyncGenerator[dict[str, Any], None]: - """ - Stream events using newline-delimited JSON format + Returns: + AsyncGenerator[dict[str, Any], None]: Generator for the events that are streamed in the form of dictionaries. """ url = f"/events/{handler_id}?sse=false" @@ -239,7 +199,7 @@ async def _stream_events_ndjson( if line.strip(): # Skip empty lines try: event = json.loads(line.replace("\n", "")) - yield event.get("value", {}) + yield event except json.JSONDecodeError as e: print(f"Failed to parse JSON: {e}, data: {line}") continue @@ -251,41 +211,6 @@ async def _stream_events_ndjson( except httpx.RequestError as e: raise ConnectionError(f"Failed to connect to event stream: {e}") - async def stream_events( - self, - handler_id: str, - event_callback: Callable[[dict[str, Any]], Any] | None = None, - sse: bool = True, - ) -> None: - """ - Stream events from a running handler. - - Args: - handler_id (str): ID of the handler streaming the events - event_callback (Callable[[dict[str, Any]], Any]): Function to call when an event is received from the stream (optional, defaults to None) - sse (bool): Whether to enable server-sent events or not - - Returns: - None - """ - callback = event_callback or ( - lambda event: logger.info(f"Processing data: {event}") - ) - is_async = inspect.iscoroutinefunction(callback) - if sse: - async for event in self._stream_events_sse(handler_id): - if is_async: - await callback(event) # type: ignore - else: - callback(event) - else: - async for event in self._stream_events_ndjson(handler_id): - if is_async: - await callback(event) # type: ignore - else: - callback(event) - return None - async def send_event( self, handler_id: str, diff --git a/src/workflows/server/server.py b/src/workflows/server/server.py index 997a7bc..c2bfdda 100644 --- a/src/workflows/server/server.py +++ b/src/workflows/server/server.py @@ -981,7 +981,9 @@ def to_dict(self) -> HandlerDict: if self.completed_at is not None else None, error=self.error, - result=self.result, + result=self.result.model_dump() + if isinstance(self.result, StopEvent) + else self.result, ) @property From c70cd9fa5c836e60e5b44f31598c4f5ae5737c84 Mon Sep 17 00:00:00 2001 From: "Clelia (Astra) Bertelli" Date: Thu, 9 Oct 2025 15:26:12 +0200 Subject: [PATCH 07/13] chore: client refactor + tests --- examples/client/client.py | 3 - pyproject.toml | 2 +- src/workflows/client/client.py | 112 +++++++++++++++--------------- src/workflows/server/server.py | 9 +-- src/workflows/server/utils.py | 29 ++++++++ tests/client/__init__.py | 0 tests/client/greeting_workflow.py | 35 ++++++++++ tests/client/hitl_workflow.py | 45 ++++++++++++ tests/client/test_client.py | 79 +++++++++++++++++++++ tests/server/test_utils.py | 42 ++++++++++- 10 files changed, 288 
insertions(+), 68 deletions(-) create mode 100644 tests/client/__init__.py create mode 100644 tests/client/greeting_workflow.py create mode 100644 tests/client/hitl_workflow.py create mode 100644 tests/client/test_client.py diff --git a/examples/client/client.py b/examples/client/client.py index 7bdb5ec..24146f8 100644 --- a/examples/client/client.py +++ b/examples/client/client.py @@ -29,9 +29,6 @@ async def main() -> None: is_healthy = await client.is_healthy() print("==== HEALTH CHECK ====") print("Healthy" if is_healthy else "Not Healty :(") - ping_time = await client.ping() - print("==== PING TIME ====") - print(ping_time, "ms") handler = await client.run_workflow_nowait( "add_or_subtract", start_event=InputNumbers(a=1, b=3, operation="sum"), diff --git a/pyproject.toml b/pyproject.toml index 69d46dd..6f590e1 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -31,7 +31,7 @@ dependencies = [ [project.optional-dependencies] server = ["starlette>=0.39.0", "uvicorn>=0.32.0"] -client = ["httpx>=0.28.1"] +client = ["httpx>=0.28.1,<1"] [tool.basedpyright] typeCheckingMode = "standard" diff --git a/src/workflows/client/client.py b/src/workflows/client/client.py index d06f91c..11e5785 100644 --- a/src/workflows/client/client.py +++ b/src/workflows/client/client.py @@ -1,15 +1,13 @@ import httpx -import time import json from typing import Literal, Any, Union, AsyncGenerator, AsyncIterator from contextlib import asynccontextmanager -from logging import getLogger from workflows.events import StartEvent, Event from workflows import Context - - -logger = getLogger(__name__) +from workflows.server.server import HandlerDict +from workflows.server.utils import serdes_event +from workflows.types import RunResultT class WorkflowClient: @@ -19,19 +17,22 @@ def __init__( host: str | None = None, port: int | None = None, timeout: int | None = None, + httpx_kwargs: dict[str, Any] | None = None, ): # TODO: middleware-related logic self.protocol = protocol or "http" self.host = host or "localhost" self.port = port or 8000 self.timeout = timeout or 600 + self.httpx_kwargs = httpx_kwargs or {} # TODO: add some basic TLS/verification and auth features @asynccontextmanager - async def _get_client(self) -> AsyncIterator: + async def _get_client(self) -> AsyncIterator[httpx.AsyncClient]: async with httpx.AsyncClient( base_url=self.protocol + "://" + self.host + ":" + str(self.port), timeout=self.timeout, + **self.httpx_kwargs, ) as client: yield client @@ -44,27 +45,8 @@ async def is_healthy(self) -> bool: """ async with self._get_client() as client: response = await client.get("/health") - if response.status_code == 200: + response.raise_for_status() return response.json().get("status", "") == "healthy" - return False - - async def ping(self) -> float: - """ - Ping the workflow and get the latency in milliseconds - - Returns: - float: latency in milliseconds - """ - async with self._get_client() as client: - start = time.time() - response = await client.get("/health") - if response.status_code == 200: - end = time.time() - return (end - start) * 1000 - else: - raise httpx.ConnectError( - f"Failed to establish a connection with server running on: {self.protocol}://{self.host}:{self.port}" - ) async def list_workflows(self) -> list[str]: """ @@ -83,9 +65,8 @@ async def list_workflows(self) -> list[str]: async def run_workflow( self, workflow_name: str, - start_event: Union[StartEvent, dict[str, Any], None] = None, + start_event: Union[StartEvent, dict[str, Any], str, None] = None, context: Union[Context, dict[str, 
Any], None] = None, - **kwargs: Any, ) -> Any: """ Run the workflow and wait until completion. @@ -93,14 +74,13 @@ async def run_workflow( Args: start_event (Union[StartEvent, dict[str, Any], None]): start event class or dictionary representation (optional, defaults to None and get passed as an empty dictionary if not provided). context: Context or serialized representation of it (optional, defaults to None if not provided) - **kwargs: Any number of keyword arguments that would be passed on as additional keyword arguments to the workflow. Returns: Any: Result of the workflow """ - if isinstance(start_event, StartEvent): + if start_event is not None: try: - start_event = start_event.model_dump() + start_event = serdes_event(start_event) except Exception as e: raise ValueError( f"Impossible to serialize the start event because of: {e}" @@ -111,9 +91,8 @@ async def run_workflow( except Exception as e: raise ValueError(f"Impossible to serialize the context because of: {e}") request_body = { - "start_event": start_event or {}, + "start_event": start_event or "", "context": context or {}, - "additional_kwargs": kwargs, } async with self._get_client() as client: response = await client.post( @@ -129,7 +108,6 @@ async def run_workflow_nowait( workflow_name: str, start_event: Union[StartEvent, dict[str, Any], None] = None, context: Union[Context, dict[str, Any], None] = None, - **kwargs: Any, ) -> dict[str, Any]: """ Run the workflow in the background. @@ -137,14 +115,13 @@ async def run_workflow_nowait( Args: start_event (Union[StartEvent, dict[str, Any], None]): start event class or dictionary representation (optional, defaults to None and get passed as an empty dictionary if not provided). context: Context or serialized representation of it (optional, defaults to None if not provided) - **kwargs: Any number of keyword arguments that would be passed on as additional keyword arguments to the workflow. Returns: dict[str, Any]: JSON representation of the handler running the workflow """ - if isinstance(start_event, StartEvent): + if start_event is not None: try: - start_event = start_event.model_dump() + start_event = serdes_event(start_event) except Exception as e: raise ValueError( f"Impossible to serialize the start event because of: {e}" @@ -155,9 +132,8 @@ async def run_workflow_nowait( except Exception as e: raise ValueError(f"Impossible to serialize the context because of: {e}") request_body = { - "start_event": start_event or {}, + "start_event": start_event or "{}", "context": context or {}, - "additional_kwargs": kwargs, } async with self._get_client() as client: response = await client.post( @@ -171,21 +147,34 @@ async def run_workflow_nowait( async def get_workflow_events( self, handler_id: str, + include_internal_events: bool = False, + lock_timeout: float = 1, ) -> AsyncGenerator[dict[str, Any], None]: """ Stream events as they are produced by the workflow. Args: handler_id (str): ID of the handler running the workflow + include_internal_events (bool): Include internal workflow events. Defaults to False. + lock_timeout (float): Timeout (in seconds) for acquiring the lock to iterate over the events. Returns: AsyncGenerator[dict[str, Any], None]: Generator for the events that are streamed in the form of dictionaries. 
""" - url = f"/events/{handler_id}?sse=false" + incl_inter = "true" if include_internal_events else "false" + url = f"/events/{handler_id}" async with self._get_client() as client: try: - async with client.stream("GET", url) as response: + async with client.stream( + "GET", + url, + params={ + "sse": "false", + "include_internal": incl_inter, + "acquire_timeout": lock_timeout, + }, + ) as response: # Handle different response codes if response.status_code == 404: raise ValueError("Handler not found") @@ -228,16 +217,10 @@ async def send_event( Returns: bool: Success status of the send operation """ - if isinstance(event, Event): - try: - event = event.model_dump_json() - except Exception as e: - raise ValueError(f"Error while serializing the provided event: {e}") - elif isinstance(event, dict): - try: - event = json.dumps(event) - except Exception as e: - raise ValueError(f"Error while serializing the provided event: {e}") + try: + event = serdes_event(event) + except Exception as e: + raise ValueError(f"Error while serializing the provided event: {e}") request_body = {"event": event} if step: request_body.update({"step": step}) @@ -247,21 +230,40 @@ async def send_event( return response.json()["status"] == "sent" - async def get_result(self, handler_id: str) -> Any: + async def get_result( + self, handler_id: str, as_handler: bool = False + ) -> Union[RunResultT, None, HandlerDict]: """ Get the result of the workflow associated with the specified handler ID. Args: handler_id (str): ID of the handler running the workflow + as_handler (bool): Return the workflow handler. Defaults to False. Returns: - Any: Result of the workflow + Any: Result of the workflow, if available, or workflow handler (when `as_handler` is set to `True`) """ async with self._get_client() as client: response = await client.get(f"/results/{handler_id}") response.raise_for_status() if response.status_code == 202: - return + return None - return response.json()["result"] + if not as_handler: + return response.json()["result"] + else: + return response.json() + + async def get_handlers(self) -> list[HandlerDict]: + """ + Get all the workflow handlers. + + Returns: + list[HandlerDict]: List of dictionaries representing workflow handlers. 
+ """ + async with self._get_client() as client: + response = await client.get("/handlers") + response.raise_for_status() + + return response.json()["handlers"] diff --git a/src/workflows/server/server.py b/src/workflows/server/server.py index 3af1391..172392d 100644 --- a/src/workflows/server/server.py +++ b/src/workflows/server/server.py @@ -45,7 +45,7 @@ Status, ) from workflows.types import RunResultT -from .utils import nanoid +from .utils import nanoid, serdes_event from .representation_utils import _extract_workflow_structure logger = logging.getLogger() @@ -1038,13 +1038,8 @@ async def _extract_run_params( # Extract custom StartEvent if present start_event = None if start_event_data is not None: - serializer = JsonSerializer() try: - start_event = ( - serializer.deserialize(start_event_data) - if isinstance(start_event_data, str) - else serializer.deserialize_value(start_event_data) - ) + start_event = serdes_event(start_event_data, False) if isinstance(start_event, dict): start_event = workflow.start_event_class.model_validate( start_event diff --git a/src/workflows/server/utils.py b/src/workflows/server/utils.py index ff2beab..57e7564 100644 --- a/src/workflows/server/utils.py +++ b/src/workflows/server/utils.py @@ -4,9 +4,38 @@ import secrets import string +from pydantic import BaseModel +from typing import Union, Any +from workflows.context.serializers import JsonSerializer + alphabet = string.ascii_letters + string.digits # A-Z, a-z, 0-9 def nanoid(size: int = 10) -> str: """Returns a unique identifier with the format 'kY2xP9hTnQ'.""" return "".join(secrets.choice(alphabet) for _ in range(size)) + + +def serdes_event( + event: Union[dict[str, Any], BaseModel, str], serialize: bool = True +) -> Any: + """ + Serialize or deserialize a start event. + + Args: + event (Union[dict[str, Any], BaseModel, str]): Input event + serialize (bool): Serialize if true, deserialize if false. + """ + serializer = JsonSerializer() + if serialize: + if isinstance(event, (BaseModel, dict)): + event = serializer.serialize(event) + else: + event = serializer.serialize_value(event) + return event + else: + if isinstance(event, str): + event = serializer.deserialize(event) + else: + event = serializer.deserialize_value(event) + return event diff --git a/tests/client/__init__.py b/tests/client/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/tests/client/greeting_workflow.py b/tests/client/greeting_workflow.py new file mode 100644 index 0000000..4c35d45 --- /dev/null +++ b/tests/client/greeting_workflow.py @@ -0,0 +1,35 @@ +import random + +from workflows import Workflow, step, Context +from workflows.events import StartEvent, Event, StopEvent + + +class InputEvent(StartEvent): + greeting: str + name: str + + +class GreetEvent(Event): + greeting: str + exclamation_marks: int + + +class OutputEvent(StopEvent): + greeting: str + + +class GreetingWorkflow(Workflow): + @step + async def first_step(self, ev: InputEvent, ctx: Context) -> GreetEvent: + ctx.write_event_to_stream(ev) + return GreetEvent( + greeting=f"{ev.greeting} {ev.name}", exclamation_marks=random.randint(1, 10) + ) + + @step + async def second_step(self, ev: GreetEvent, ctx: Context) -> OutputEvent: + ctx.write_event_to_stream(ev) + return OutputEvent(greeting=f"{ev.greeting}{'!' 
* ev.exclamation_marks}") + + +greeting_wf = GreetingWorkflow() diff --git a/tests/client/hitl_workflow.py b/tests/client/hitl_workflow.py new file mode 100644 index 0000000..ecfb241 --- /dev/null +++ b/tests/client/hitl_workflow.py @@ -0,0 +1,45 @@ +import asyncio + +from workflows import Workflow, step +from workflows.context import Context +from workflows.events import ( + Event, + StartEvent, + StopEvent, + InputRequiredEvent, + HumanResponseEvent, +) + + +class RequestEvent(InputRequiredEvent): + prompt: str + + +class ResponseEvent(HumanResponseEvent): + response: str + + +class HiddenEvent(Event): + prompt: str + + +class OutEvent(StopEvent): + output: str + + +class HumanInTheLoopWorkflow(Workflow): + @step + async def prompt_human(self, ev: StartEvent) -> RequestEvent: + await asyncio.sleep(1) + return RequestEvent(prompt="What is your name?") + + @step + async def greet_human(self, ctx: Context, ev: ResponseEvent) -> OutEvent: + follow_up = f"Nice to meet you, {ev.response}! Did you get my hidden message?" + resp = await ctx.wait_for_event( + ResponseEvent, waiter_event=HiddenEvent(prompt=follow_up) + ) + return OutEvent(output=f"Got it!: {resp.response}") + + +hitl_wf = HumanInTheLoopWorkflow() diff --git a/tests/client/test_client.py b/tests/client/test_client.py new file mode 100644 index 0000000..4567552 --- /dev/null +++ b/tests/client/test_client.py @@ -0,0 +1,79 @@ +import pytest + +from httpx import ASGITransport +from workflows.server.server import WorkflowServer +from workflows.client import WorkflowClient +from .greeting_workflow import greeting_wf, InputEvent, OutputEvent +from .hitl_workflow import hitl_wf + + +@pytest.fixture() +def server() -> WorkflowServer: + ws = WorkflowServer() + ws.add_workflow(name="greeting", workflow=greeting_wf) + ws.add_workflow(name="human", workflow=hitl_wf) + return ws + + +@pytest.mark.asyncio +async def test_client(server: WorkflowServer) -> None: + transport = ASGITransport(server.app) + client = WorkflowClient(httpx_kwargs={"transport": transport}) + is_healthy = await client.is_healthy() + assert is_healthy + wfs = await client.list_workflows() + assert isinstance(wfs, list) + assert len(wfs) == 2 + assert wfs[0] == "greeting" + assert wfs[1] == "human" + handler = await client.run_workflow_nowait( + "greeting", start_event=InputEvent(greeting="hello", name="John") + ) + assert isinstance(handler, dict) + handler_id = handler["handler_id"] + events = [] + async for event in client.get_workflow_events(handler_id=handler_id): + assert isinstance(event, dict) + events.append(event) + assert len(events) == 3 + result = await client.get_result(handler_id) + assert result is not None + res = OutputEvent.model_validate(result) + assert "John" in res.greeting and "!" in res.greeting and "hello" in res.greeting + result = await client.get_result(handler_id, as_handler=True) + assert isinstance(result, dict) + assert result["handler_id"] == handler_id + handlers = await client.get_handlers() + assert isinstance(handlers, list) + assert len(handlers) == 1 + assert handlers[0] == result + result = await client.run_workflow( + "greeting", start_event=InputEvent(greeting="hello", name="John") + ) + assert result is not None + res = OutputEvent.model_validate(result) + assert "John" in res.greeting and "!" 
in res.greeting and "hello" in res.greeting + handler = await client.run_workflow_nowait( + "greeting", start_event=InputEvent(greeting="hello", name="John") + ) + handler_id = handler["handler_id"] + events = [] + async for event in client.get_workflow_events( + handler_id=handler_id, include_internal_events=True + ): + assert isinstance(event, dict) + events.append(event) + assert len(events) > 3 + # handler = await client.run_workflow_nowait("human") + # handler_id = handler["handler_id"] + # async for event in client.get_workflow_events(handler_id=handler_id): + # if event.get("qualified_name", "") == "tests.client.hitl_workflow.RequestEvent": + # sent_event = await client.send_event(handler_id=handler_id, event=ResponseEvent(response="John")) + # assert sent_event + # if event.get("qualified_name", "") == "tests.client.hitl_workflow.HiddenEvent": + # sent_event = await client.send_event(handler_id=handler_id, event=ResponseEvent(response="Yes")) + # assert sent_event + # result = await client.get_result(handler_id) + # assert result is not None + # res = OutEvent.model_validate(result) + # assert res.output == "Got it!: Yes" diff --git a/tests/server/test_utils.py b/tests/server/test_utils.py index da732cb..fed9635 100644 --- a/tests/server/test_utils.py +++ b/tests/server/test_utils.py @@ -1,8 +1,10 @@ # SPDX-License-Identifier: MIT # Copyright (c) 2025 LlamaIndex Inc. +import pytest - -from workflows.server.utils import nanoid +from typing import Union +from workflows.server.utils import nanoid, serdes_event +from workflows.events import StartEvent def test_nanoid_default_length() -> None: @@ -44,3 +46,39 @@ def test_nanoid_negative_length() -> None: result = nanoid(-10) assert result == "" + + +def test_serdes_event_serialization() -> None: + event: Union[str, dict, StartEvent] = {"hello": "world"} + ser_event = serdes_event(event) + assert isinstance(ser_event, str) + assert ser_event == '{"hello": "world"}' + event = StartEvent(message="hello") # type: ignore + ser_event = serdes_event(event) + assert isinstance(ser_event, str) + assert ( + ser_event + == '{"__is_pydantic": true, "value": {"_data": {"message": "hello"}}, "qualified_name": "workflows.events.StartEvent"}' + ) + event = '{"hello": "world"}' + ser_event = serdes_event(event) + assert isinstance(ser_event, str) + assert ser_event == '{"hello": "world"}' + event = {"type": str} + with pytest.raises(ValueError): + serdes_event(event) + + +def test_serdes_event_deserialization() -> None: + event: Union[str, dict] = '{"hello": "world"}' + deser_event = serdes_event(event, serialize=False) + assert isinstance(deser_event, dict) + assert deser_event == {"hello": "world"} + event = '{"__is_pydantic": true, "value": {"_data": {"message": "hello"}}, "qualified_name": "workflows.events.StartEvent"}' + deser_event = serdes_event(event, serialize=False) + assert isinstance(deser_event, StartEvent) + assert deser_event == StartEvent(message="hello") # type: ignore + event = {"hello": "world"} + ser_event = serdes_event(event, serialize=False) + assert isinstance(ser_event, dict) + assert ser_event == {"hello": "world"} From 87c2cc3f3119f87f5b83c96248cbc096885687d9 Mon Sep 17 00:00:00 2001 From: "Clelia (Astra) Bertelli" Date: Thu, 9 Oct 2025 15:29:50 +0200 Subject: [PATCH 08/13] make 3.9 happy again? 
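Context for this change: Python 3.9 cannot evaluate PEP 604 unions (`X | None`) in annotations at import time, so the client signatures fall back to `typing.Optional` / `typing.Union`. A minimal sketch of the failure mode, with an illustrative helper (`ping` below is not part of the client API):

    from typing import Optional

    # Works on Python 3.9 and later
    def ping(host: Optional[str] = None) -> str:
        return host or "localhost"

    # Raises TypeError at import time on 3.9, since the `str | None`
    # annotation is evaluated when the function is defined and the
    # `|` operator between types only exists from 3.10 (PEP 604),
    # unless `from __future__ import annotations` is in effect:
    # def ping(host: str | None = None) -> str: ...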
--- src/workflows/client/client.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/workflows/client/client.py b/src/workflows/client/client.py index 11e5785..7d9cb30 100644 --- a/src/workflows/client/client.py +++ b/src/workflows/client/client.py @@ -1,7 +1,7 @@ import httpx import json -from typing import Literal, Any, Union, AsyncGenerator, AsyncIterator +from typing import Literal, Any, Union, AsyncGenerator, AsyncIterator, Optional from contextlib import asynccontextmanager from workflows.events import StartEvent, Event from workflows import Context @@ -13,7 +13,7 @@ class WorkflowClient: def __init__( self, - protocol: Literal["http", "https"] | None = None, + protocol: Optional[Literal["http", "https"]] = None, host: str | None = None, port: int | None = None, timeout: int | None = None, From f2bc1bab57ac69e262fa7e548a5f459ec3a52c8a Mon Sep 17 00:00:00 2001 From: "Clelia (Astra) Bertelli" Date: Thu, 9 Oct 2025 15:42:50 +0200 Subject: [PATCH 09/13] fix: 3.9 typing quirks --- src/workflows/client/client.py | 14 +++++++------- 1 file changed, 7 insertions(+), 7 deletions(-) diff --git a/src/workflows/client/client.py b/src/workflows/client/client.py index 7d9cb30..4abe0ea 100644 --- a/src/workflows/client/client.py +++ b/src/workflows/client/client.py @@ -14,10 +14,10 @@ class WorkflowClient: def __init__( self, protocol: Optional[Literal["http", "https"]] = None, - host: str | None = None, - port: int | None = None, - timeout: int | None = None, - httpx_kwargs: dict[str, Any] | None = None, + host: Optional[str] = None, + port: Optional[int] = None, + timeout: Optional[int] = None, + httpx_kwargs: Optional[dict[str, Any]] = None, ): # TODO: middleware-related logic self.protocol = protocol or "http" @@ -203,8 +203,8 @@ async def get_workflow_events( async def send_event( self, handler_id: str, - event: Event | dict[str, Any] | str, - step: str | None = None, + event: Union[Event, dict[str, Any], str], + step: Optional[str] = None, ) -> bool: """ Send an event to the workflow. @@ -212,7 +212,7 @@ async def send_event( Args: handler_id (str): ID of the handler of the running workflow to send the event to event (Event | dict[str, Any] | str): Event to send, represented as an Event object, a dictionary or a serialized string. - step (str | None): Step to send the event to (optional, defaults to None) + step (Optional[str]): Step to send the event to (optional, defaults to None) Returns: bool: Success status of the send operation From 2e7adbfbc80fbc28d1fb0eaf5fa89b7751ec61e2 Mon Sep 17 00:00:00 2001 From: "Clelia (Astra) Bertelli" Date: Thu, 9 Oct 2025 16:02:21 +0200 Subject: [PATCH 10/13] chore: add handler_id option to run methods --- src/workflows/client/client.py | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/src/workflows/client/client.py b/src/workflows/client/client.py index 4abe0ea..bc922ad 100644 --- a/src/workflows/client/client.py +++ b/src/workflows/client/client.py @@ -65,6 +65,7 @@ async def list_workflows(self) -> list[str]: async def run_workflow( self, workflow_name: str, + handler_id: Optional[str] = None, start_event: Union[StartEvent, dict[str, Any], str, None] = None, context: Union[Context, dict[str, Any], None] = None, ) -> Any: @@ -74,6 +75,7 @@ async def run_workflow( Args: start_event (Union[StartEvent, dict[str, Any], None]): start event class or dictionary representation (optional, defaults to None and get passed as an empty dictionary if not provided). 
context: Context or serialized representation of it (optional, defaults to None if not provided) + handler_id (Optional[str]): Workflow handler identifier to continue from a previous completed run. Returns: Any: Result of the workflow @@ -94,6 +96,8 @@ async def run_workflow( "start_event": start_event or "", "context": context or {}, } + if handler_id: + request_body["handler_id"] = handler_id async with self._get_client() as client: response = await client.post( f"/workflows/{workflow_name}/run", json=request_body @@ -106,6 +110,7 @@ async def run_workflow( async def run_workflow_nowait( self, workflow_name: str, + handler_id: Optional[str] = None, start_event: Union[StartEvent, dict[str, Any], None] = None, context: Union[Context, dict[str, Any], None] = None, ) -> dict[str, Any]: @@ -115,6 +120,7 @@ async def run_workflow_nowait( Args: start_event (Union[StartEvent, dict[str, Any], None]): start event class or dictionary representation (optional, defaults to None and get passed as an empty dictionary if not provided). context: Context or serialized representation of it (optional, defaults to None if not provided) + handler_id (Optional[str]): Workflow handler identifier to continue from a previous completed run. Returns: dict[str, Any]: JSON representation of the handler running the workflow @@ -135,6 +141,8 @@ async def run_workflow_nowait( "start_event": start_event or "{}", "context": context or {}, } + if handler_id: + request_body["handler_id"] = handler_id async with self._get_client() as client: response = await client.post( f"/workflows/{workflow_name}/run-nowait", json=request_body From a73f7018ff417777570c3ea7da7ccea17d236631 Mon Sep 17 00:00:00 2001 From: "Clelia (Astra) Bertelli" Date: Fri, 10 Oct 2025 12:23:20 +0200 Subject: [PATCH 11/13] chore: remove human in the loop from tests; feat: add working hitl example; fix: use serialized StartEvent as input when no start event is provided --- .../{client.py => base/workflow_client.py} | 0 .../{server.py => base/workflow_server.py} | 0 .../human_in_the_loop/workflow_client_hitl.py | 42 ++++++++++++++++ .../human_in_the_loop/workflow_server_hitl.py | 48 +++++++++++++++++++ src/workflows/client/client.py | 2 +- tests/client/hitl_workflow.py | 45 ----------------- tests/client/test_client.py | 18 +------ 7 files changed, 92 insertions(+), 63 deletions(-) rename examples/client/{client.py => base/workflow_client.py} (100%) rename examples/client/{server.py => base/workflow_server.py} (100%) create mode 100644 examples/client/human_in_the_loop/workflow_client_hitl.py create mode 100644 examples/client/human_in_the_loop/workflow_server_hitl.py delete mode 100644 tests/client/hitl_workflow.py diff --git a/examples/client/client.py b/examples/client/base/workflow_client.py similarity index 100% rename from examples/client/client.py rename to examples/client/base/workflow_client.py diff --git a/examples/client/server.py b/examples/client/base/workflow_server.py similarity index 100% rename from examples/client/server.py rename to examples/client/base/workflow_server.py diff --git a/examples/client/human_in_the_loop/workflow_client_hitl.py b/examples/client/human_in_the_loop/workflow_client_hitl.py new file mode 100644 index 0000000..815e634 --- /dev/null +++ b/examples/client/human_in_the_loop/workflow_client_hitl.py @@ -0,0 +1,42 @@ +import asyncio + +from workflows.client import WorkflowClient +from workflows.events import ( + StopEvent, + HumanResponseEvent, +) + + +class ResponseEvent(HumanResponseEvent): + response: str + + +class 
OutEvent(StopEvent): + output: str + + +async def main() -> None: + client = WorkflowClient(protocol="http", port=8000, host="localhost") + handler = await client.run_workflow_nowait("human") + handler_id = handler["handler_id"] + print(handler_id) + async for event in client.get_workflow_events(handler_id=handler_id): + if "RequestEvent" in event.get("qualified_name", ""): + print( + "Workflow is requiring human input:", + event.get("value", {}).get("prompt", ""), + ) + name = input("Reply here: ") + sent_event = await client.send_event( + handler_id=handler_id, + event=ResponseEvent(response=name.capitalize().strip()), + ) + msg = "Event has been sent" if sent_event else "Event failed to send" + print(msg) + result = await client.get_result(handler_id) + res = OutEvent.model_validate(result) + print("Received final message:", res.output) + + +if __name__ == "__main__": + asyncio.run(main()) diff --git a/examples/client/human_in_the_loop/workflow_server_hitl.py b/examples/client/human_in_the_loop/workflow_server_hitl.py new file mode 100644 index 0000000..29bb472 --- /dev/null +++ b/examples/client/human_in_the_loop/workflow_server_hitl.py @@ -0,0 +1,48 @@ +from workflows import Workflow, step +from workflows.context import Context +from workflows.events import ( + StartEvent, + StopEvent, + InputRequiredEvent, + HumanResponseEvent, +) +from workflows.server import WorkflowServer + + +class RequestEvent(InputRequiredEvent): + prompt: str + + +class ResponseEvent(HumanResponseEvent): + response: str + + +class OutEvent(StopEvent): + output: str + + +class HumanInTheLoopWorkflow(Workflow): + @step + async def prompt_human(self, ev: StartEvent, ctx: Context) -> RequestEvent: + return RequestEvent(prompt="What is your name?") + + @step + async def greet_human(self, ev: ResponseEvent) -> OutEvent: + return OutEvent(output=f"Hello, {ev.response}") + + +async def main() -> None: + server = WorkflowServer() + server.add_workflow("human", HumanInTheLoopWorkflow(timeout=1000)) + try: + await server.serve("localhost", 8000) + except KeyboardInterrupt: + return + except Exception as e: + raise ValueError(f"An error occurred: {e}") + + +if __name__ == "__main__": + import asyncio + + asyncio.run(main()) diff --git a/src/workflows/client/client.py b/src/workflows/client/client.py index bc922ad..ae57514 100644 --- a/src/workflows/client/client.py +++ b/src/workflows/client/client.py @@ -138,7 +138,7 @@ async def run_workflow_nowait( except Exception as e: raise ValueError(f"Impossible to serialize the context because of: {e}") request_body = { - "start_event": start_event or "{}", + "start_event": start_event or serdes_event(StartEvent()), "context": context or {}, } if handler_id: diff --git a/tests/client/hitl_workflow.py b/tests/client/hitl_workflow.py deleted file mode 100644 index ecfb241..0000000 --- a/tests/client/hitl_workflow.py +++ /dev/null @@ -1,45 +0,0 @@ -import asyncio - -from workflows import Workflow, step -from workflows.context import Context -from workflows.events import ( - Event, - StartEvent, - StopEvent, - InputRequiredEvent, - HumanResponseEvent, -) - - -class RequestEvent(InputRequiredEvent): - prompt: str - - -class ResponseEvent(HumanResponseEvent): - response: str - - -class HiddenEvent(Event): - prompt: str - - -class OutEvent(StopEvent): - output: str - - -class HumanInTheLoopWorkflow(Workflow): - @step - async def prompt_human(self, ev: StartEvent) -> RequestEvent: - await asyncio.sleep(1) - return RequestEvent(prompt="What is your name?") - - @step - async def 
greet_human(self, ctx: Context, ev: ResponseEvent) -> OutEvent: - follow_up = f"Nice to meet you, {ev.response}! Did you get my hidden message?" - resp = await ctx.wait_for_event( - ResponseEvent, waiter_event=HiddenEvent(prompt=follow_up) - ) - return OutEvent(output=f"Got it!: {resp.response}") - - -hitl_wf = HumanInTheLoopWorkflow() diff --git a/tests/client/test_client.py b/tests/client/test_client.py index 4567552..83baaf0 100644 --- a/tests/client/test_client.py +++ b/tests/client/test_client.py @@ -4,14 +4,12 @@ from workflows.server.server import WorkflowServer from workflows.client import WorkflowClient from .greeting_workflow import greeting_wf, InputEvent, OutputEvent -from .hitl_workflow import hitl_wf @pytest.fixture() def server() -> WorkflowServer: ws = WorkflowServer() ws.add_workflow(name="greeting", workflow=greeting_wf) - ws.add_workflow(name="human", workflow=hitl_wf) return ws @@ -23,9 +21,8 @@ async def test_client(server: WorkflowServer) -> None: assert is_healthy wfs = await client.list_workflows() assert isinstance(wfs, list) - assert len(wfs) == 2 + assert len(wfs) == 1 assert wfs[0] == "greeting" - assert wfs[1] == "human" handler = await client.run_workflow_nowait( "greeting", start_event=InputEvent(greeting="hello", name="John") ) @@ -64,16 +61,3 @@ async def test_client(server: WorkflowServer) -> None: assert isinstance(event, dict) events.append(event) assert len(events) > 3 - # handler = await client.run_workflow_nowait("human") - # handler_id = handler["handler_id"] - # async for event in client.get_workflow_events(handler_id=handler_id): - # if event.get("qualified_name", "") == "tests.client.hitl_workflow.RequestEvent": - # sent_event = await client.send_event(handler_id=handler_id, event=ResponseEvent(response="John")) - # assert sent_event - # if event.get("qualified_name", "") == "tests.client.hitl_workflow.HiddenEvent": - # sent_event = await client.send_event(handler_id=handler_id, event=ResponseEvent(response="Yes")) - # assert sent_event - # result = await client.get_result(handler_id) - # assert result is not None - # res = OutEvent.model_validate(result) - # assert res.output == "Got it!: Yes" From b7fbfbaf3d3a3f2bf0d73dae5c7e377c8d85ce4d Mon Sep 17 00:00:00 2001 From: Adrian Lyjak Date: Fri, 10 Oct 2025 15:44:14 -0400 Subject: [PATCH 12/13] Add type validators, and return full types to support extensability (#134) * Add type validators, and return full types to support extensability * Test edits * switch to pydantic * fix deserialization duplication --- examples/client/base/workflow_client.py | 36 ++------ examples/client/base/workflow_server.py | 20 +---- .../human_in_the_loop/workflow_client_hitl.py | 5 +- src/workflows/client/client.py | 79 ++++++++-------- src/workflows/protocol/__init__.py | 90 +++++++++++++++++++ src/workflows/server/representation_utils.py | 42 +++++---- src/workflows/server/server.py | 77 +++++++++------- src/workflows/server/utils.py | 29 ------ tests/client/test_client.py | 80 +++++++++++++---- tests/server/test_handler_serialization.py | 10 ++- tests/server/test_utils.py | 41 +-------- uv.lock | 8 +- 12 files changed, 292 insertions(+), 225 deletions(-) create mode 100644 src/workflows/protocol/__init__.py diff --git a/examples/client/base/workflow_client.py b/examples/client/base/workflow_client.py index 24146f8..52ec85b 100644 --- a/examples/client/base/workflow_client.py +++ b/examples/client/base/workflow_client.py @@ -3,22 +3,15 @@ from workflows.client import WorkflowClient from workflows.events import 
StartEvent -from pydantic import PrivateAttr, model_validator, Field +from pydantic import Field -from typing import Literal, Callable, Self +from typing import Literal class InputNumbers(StartEvent): a: int b: int - operation: Literal["sum", "subtraction"] = Field(default="sum") - _function: Callable[[int, int], int] = PrivateAttr(default=lambda a, b: a + b) - - @model_validator(mode="after") - def assign_function(self) -> Self: - if self.operation == "subtraction": - self._function = lambda a, b: a - b - return self + operation: Literal["addition", "subtraction"] = Field(default="addition") async def main() -> None: @@ -26,35 +19,22 @@ async def main() -> None: workflows = await client.list_workflows() print("===== AVAILABLE WORKFLOWS ====") print(workflows) - is_healthy = await client.is_healthy() - print("==== HEALTH CHECK ====") - print("Healthy" if is_healthy else "Not Healty :(") + await client.is_healthy() # will raise an exception if the server is not healthy handler = await client.run_workflow_nowait( "add_or_subtract", - start_event=InputNumbers(a=1, b=3, operation="sum"), + start_event=InputNumbers(a=1, b=3, operation="addition"), context=None, ) - handler_id = handler["handler_id"] + handler_id = handler.handler_id print("==== STARTING THE WORKFLOW ===") print(f"Workflow running with handler ID: {handler_id}") print("=== STREAMING EVENTS ===") async for event in client.get_workflow_events(handler_id=handler_id): print("Received data:", event) + result = await client.get_result(handler_id) - # Poll for result - result = None - while result is None: - try: - result = await client.get_result(handler_id) - if result is not None: - break - await asyncio.sleep(1) - except Exception as e: - print(f"Error: {e}") - await asyncio.sleep(1) - - print(f"Final result: {result}") + print(f"Final result: {result.result} (status: {result.status})") if __name__ == "__main__": diff --git a/examples/client/base/workflow_server.py b/examples/client/base/workflow_server.py index bd99694..d178d35 100644 --- a/examples/client/base/workflow_server.py +++ b/examples/client/base/workflow_server.py @@ -9,7 +9,7 @@ class InputNumbers(StartEvent): a: int b: int - operation: Literal["sum", "subtraction"] = Field(default="sum") + operation: Literal["addition", "subtraction"] = Field(default="addition") class CalculationEvent(Event): @@ -22,24 +22,12 @@ class OutputEvent(StopEvent): class AddOrSubtractWorkflow(Workflow): @step - async def first_step( - self, ev: InputNumbers, ctx: Context - ) -> CalculationEvent | None: + async def first_step(self, ev: InputNumbers, ctx: Context) -> OutputEvent | None: ctx.write_event_to_stream(ev) - result = ev.a + ev.b if ev.operation == "sum" else ev.a - ev.b - async with ctx.store.edit_state() as state: - state.operation = ev.operation - state.a = ev.a - state.b = ev.b - state.result = result + result = ev.a + ev.b if ev.operation == "addition" else ev.a - ev.b ctx.write_event_to_stream(CalculationEvent(result=result)) - return CalculationEvent(result=result) - - @step - async def second_step(self, ev: CalculationEvent, ctx: Context) -> OutputEvent: - state = await ctx.store.get_state() return OutputEvent( - message=f"You approved the result from your operation ({state.operation}) between {state.a} and {state.b}: {ev.result}" + message=f"You {ev.operation} operation ({ev.operation}) between {ev.a} and {ev.b}: {result}" ) diff --git a/examples/client/human_in_the_loop/workflow_client_hitl.py b/examples/client/human_in_the_loop/workflow_client_hitl.py index 815e634..8b64eca 
--- a/examples/client/human_in_the_loop/workflow_client_hitl.py
+++ b/examples/client/human_in_the_loop/workflow_client_hitl.py
@@ -18,7 +18,7 @@ class OutEvent(StopEvent):
 async def main() -> None:
     client = WorkflowClient(protocol="http", port=8000, host="localhost")
     handler = await client.run_workflow_nowait("human")
-    handler_id = handler["handler_id"]
+    handler_id = handler.handler_id
     print(handler_id)
     async for event in client.get_workflow_events(handler_id=handler_id):
         if "RequestEvent" in event.get("qualified_name", ""):
@@ -34,7 +34,8 @@ async def main() -> None:
         msg = "Event has been sent" if sent_event else "Event failed to send"
         print(msg)
     result = await client.get_result(handler_id)
-    res = OutEvent.model_validate(result)
+    print(f"Workflow complete with status: {result.status}")
+    res = OutEvent.model_validate(result.result)
     print("Received final message:", res.output)
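Not part of the patch: the hunks above only show the changed lines of the human-in-the-loop example. A minimal sketch of the same round trip with the typed client API follows, assuming this version of the series (protocol/host/port constructor) and a server exposing the "human" workflow; streamed events arrive as plain dicts keyed by "qualified_name", send_event takes an Event instance, and get_result returns the handler state. The ApprovalEvent class and its approved field are illustrative assumptions, not part of the library.

    import asyncio

    from workflows.client import WorkflowClient
    from workflows.events import HumanResponseEvent


    class ApprovalEvent(HumanResponseEvent):  # illustrative response event
        approved: bool


    async def approve_when_asked() -> None:
        client = WorkflowClient(protocol="http", host="localhost", port=8000)
        handler = await client.run_workflow_nowait("human")
        async for event in client.get_workflow_events(handler_id=handler.handler_id):
            # Streamed events are dicts; the qualified name identifies the event type
            if "RequestEvent" in event.get("qualified_name", ""):
                await client.send_event(
                    handler_id=handler.handler_id, event=ApprovalEvent(approved=True)
                )
        result = await client.get_result(handler.handler_id)
        print(result.status, result.result)


    asyncio.run(approve_when_asked())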
diff --git a/src/workflows/client/client.py b/src/workflows/client/client.py
index ae57514..4eb7a7f 100644
--- a/src/workflows/client/client.py
+++ b/src/workflows/client/client.py
@@ -3,11 +3,16 @@
 from typing import Literal, Any, Union, AsyncGenerator, AsyncIterator, Optional
 from contextlib import asynccontextmanager
+from workflows.context.serializers import JsonSerializer
 from workflows.events import StartEvent, Event
 from workflows import Context
-from workflows.server.server import HandlerDict
-from workflows.server.utils import serdes_event
-from workflows.types import RunResultT
+from workflows.protocol import (
+    HandlerData,
+    HandlersListResponse,
+    HealthResponse,
+    SendEventResponse,
+    WorkflowsListResponse,
+)
 
 
 class WorkflowClient:
@@ -36,7 +41,7 @@ async def _get_client(self) -> AsyncIterator[httpx.AsyncClient]:
         ) as client:
             yield client
 
-    async def is_healthy(self) -> bool:
+    async def is_healthy(self) -> HealthResponse:
         """
         Check whether the workflow server is healthy or not
@@ -46,9 +51,9 @@
         async with self._get_client() as client:
             response = await client.get("/health")
             response.raise_for_status()
-            return response.json().get("status", "") == "healthy"
+            return HealthResponse.model_validate(response.json())
 
-    async def list_workflows(self) -> list[str]:
+    async def list_workflows(self) -> WorkflowsListResponse:
         """
         List workflows
@@ -60,15 +65,15 @@
             response.raise_for_status()
 
-            return response.json()["workflows"]
+            return WorkflowsListResponse.model_validate(response.json())
 
     async def run_workflow(
         self,
         workflow_name: str,
         handler_id: Optional[str] = None,
-        start_event: Union[StartEvent, dict[str, Any], str, None] = None,
+        start_event: Union[StartEvent, dict[str, Any], None] = None,
         context: Union[Context, dict[str, Any], None] = None,
-    ) -> Any:
+    ) -> HandlerData:
         """
         Run the workflow and wait until completion.
@@ -78,11 +83,11 @@
             handler_id (Optional[str]): Workflow handler identifier to continue from a previous completed run.
 
         Returns:
-            Any: Result of the workflow
+            HandlerData: Handler state including result and metadata
         """
         if start_event is not None:
             try:
-                start_event = serdes_event(start_event)
+                start_event = _serialize_event(start_event)
             except Exception as e:
                 raise ValueError(
                     f"Impossible to serialize the start event because of: {e}"
@@ -105,7 +110,7 @@
             response.raise_for_status()
 
-            return response.json()["result"]
+            return HandlerData.model_validate(response.json())
 
     async def run_workflow_nowait(
         self,
@@ -113,7 +118,7 @@
         handler_id: Optional[str] = None,
         start_event: Union[StartEvent, dict[str, Any], None] = None,
         context: Union[Context, dict[str, Any], None] = None,
-    ) -> dict[str, Any]:
+    ) -> HandlerData:
         """
         Run the workflow in the background.
@@ -123,11 +128,11 @@
             handler_id (Optional[str]): Workflow handler identifier to continue from a previous completed run.
 
         Returns:
-            dict[str, Any]: JSON representation of the handler running the workflow
+            HandlerData: JSON representation of the handler running the workflow
         """
         if start_event is not None:
             try:
-                start_event = serdes_event(start_event)
+                start_event = _serialize_event(start_event)
             except Exception as e:
                 raise ValueError(
                     f"Impossible to serialize the start event because of: {e}"
@@ -137,8 +142,8 @@
                 context = context.to_dict()
             except Exception as e:
                 raise ValueError(f"Impossible to serialize the context because of: {e}")
-        request_body = {
-            "start_event": start_event or serdes_event(StartEvent()),
+        request_body: dict[str, Any] = {
+            "start_event": start_event or _serialize_event(StartEvent()),
             "context": context or {},
         }
         if handler_id:
@@ -150,7 +155,7 @@
             response.raise_for_status()
 
-            return response.json()
+            return HandlerData.model_validate(response.json())
 
     async def get_workflow_events(
         self,
@@ -211,9 +216,11 @@
     async def send_event(
         self,
         handler_id: str,
-        event: Union[Event, dict[str, Any], str],
+        event: Union[
+            Event, dict[str, Any]
+        ],  # either an Event object, or a dictionary representation (with type metadata and embedded value)
         step: Optional[str] = None,
-    ) -> bool:
+    ) -> SendEventResponse:
         """
         Send an event to the workflow.
@@ -223,24 +230,22 @@
             step (Optional[str]): Step to send the event to (optional, defaults to None)
 
         Returns:
-            bool: Success status of the send operation
+            SendEventResponse: Confirmation of the send operation
         """
         try:
-            event = serdes_event(event)
+            serialized_event: dict[str, Any] = _serialize_event(event)
         except Exception as e:
             raise ValueError(f"Error while serializing the provided event: {e}")
-        request_body = {"event": event}
+        request_body: dict[str, Any] = {"event": serialized_event}
         if step:
-            request_body.update({"step": step})
+            request_body["step"] = step
         async with self._get_client() as client:
             response = await client.post(f"/events/{handler_id}", json=request_body)
 
             response.raise_for_status()
 
-            return response.json()["status"] == "sent"
+            return SendEventResponse.model_validate(response.json())
 
-    async def get_result(
-        self, handler_id: str, as_handler: bool = False
-    ) -> Union[RunResultT, None, HandlerDict]:
+    async def get_result(self, handler_id: str) -> HandlerData:
         """
         Get the result of the workflow associated with the specified handler ID.
@@ -255,15 +260,9 @@ async def get_result(
             response = await client.get(f"/results/{handler_id}")
 
             response.raise_for_status()
 
-            if response.status_code == 202:
-                return None
-
-            if not as_handler:
-                return response.json()["result"]
-            else:
-                return response.json()
+            return HandlerData.model_validate(response.json())
 
-    async def get_handlers(self) -> list[HandlerDict]:
+    async def get_handlers(self) -> HandlersListResponse:
         """
         Get all the workflow handlers.
@@ -274,4 +273,10 @@
             response = await client.get("/handlers")
 
             response.raise_for_status()
 
-            return response.json()["handlers"]
+            return HandlersListResponse.model_validate(response.json())
+
+
+def _serialize_event(event: Union[Event, dict[str, Any]]) -> dict[str, Any]:
+    if isinstance(event, dict):
+        return event  # assumes you know what you are doing. In many cases this needs to be a dict that contains type metadata and the value
+    return JsonSerializer().serialize_value(event)
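Not part of the patch: with this change every client call returns a pydantic protocol model instead of a raw dict or bare result. A minimal usage sketch against a local server follows; the "greeting" workflow name and the bare StartEvent are illustrative only, and the constructor still uses the protocol/host/port form introduced earlier in the series.

    import asyncio

    from workflows.client import WorkflowClient
    from workflows.events import StartEvent


    async def main() -> None:
        # assumes a workflow server from this repo is listening on localhost:8000
        client = WorkflowClient(protocol="http", host="localhost", port=8000)

        health = await client.is_healthy()      # HealthResponse (raises on HTTP errors)
        names = await client.list_workflows()   # WorkflowsListResponse
        print(health.status, names.workflows)

        handler = await client.run_workflow_nowait("greeting", start_event=StartEvent())
        print(handler.handler_id, handler.status)  # HandlerData fields

        final = await client.get_result(handler.handler_id)  # HandlerData, not a bare result
        if final.status == "completed":
            print(final.result)  # workflow-defined payload


    asyncio.run(main())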
diff --git a/src/workflows/protocol/__init__.py b/src/workflows/protocol/__init__.py
new file mode 100644
index 0000000..f413c06
--- /dev/null
+++ b/src/workflows/protocol/__init__.py
@@ -0,0 +1,90 @@
+from __future__ import annotations
+
+from typing import Any, Literal
+from pydantic import BaseModel
+
+# Shared protocol types between client and server
+
+# Mirrors server.store Status
+Status = Literal["running", "completed", "failed", "cancelled"]
+
+
+class HandlerData(BaseModel):
+    handler_id: str
+    workflow_name: str
+    run_id: str | None
+    error: str | None
+    # result is workflow-defined; None if not completed
+    result: Any | None
+    status: Status
+    started_at: str
+    updated_at: str | None
+    completed_at: str | None
+
+
+class HandlersListResponse(BaseModel):
+    handlers: list[HandlerData]
+
+
+class HealthResponse(BaseModel):
+    status: Literal["healthy"]
+
+
+class WorkflowsListResponse(BaseModel):
+    workflows: list[str]
+
+
+class SendEventResponse(BaseModel):
+    status: Literal["sent"]
+
+
+class CancelHandlerResponse(BaseModel):
+    status: Literal["deleted", "cancelled"]
+
+
+class WorkflowSchemaResponse(BaseModel):
+    start: dict[str, Any]
+    stop: dict[str, Any]
+
+
+class WorkflowEventsListResponse(BaseModel):
+    events: list[dict[str, Any]]
+
+
+class WorkflowGraphResponse(BaseModel):
+    graph: "WorkflowGraphNodeEdges"
+
+
+class WorkflowGraphNode(BaseModel):
+    id: str
+    label: str
+    node_type: str
+    title: str | None
+    event_type: str | None
+
+
+class WorkflowGraphEdge(BaseModel):
+    source: str
+    target: str
+
+
+class WorkflowGraphNodeEdges(BaseModel):
+    nodes: list[WorkflowGraphNode]
+    edges: list[WorkflowGraphEdge]
+
+
+__all__ = [
+    "Status",
+    "HandlerData",
+    "HandlersListResponse",
+    "HealthResponse",
+    "WorkflowsListResponse",
+    "SendEventResponse",
+    "CancelHandlerResponse",
+    "WorkflowSchemaResponse",
+    "WorkflowEventsListResponse",
+    "WorkflowGraphResponse",
+    "WorkflowGraphNode",
+    "WorkflowGraphEdge",
+    "WorkflowGraphNodeEdges",
+]
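Not part of the patch: the new module gives client and server a single set of pydantic response models, so any payload shaped like the server's model_dump() output validates directly on the client side. A minimal sketch; the payload below is made up for illustration.

    from workflows.protocol import HandlerData, HandlersListResponse

    # Made-up payload in the shape the server produces via model_dump()
    payload = {
        "handler_id": "kY2xP9hTnQ",
        "workflow_name": "greeting",
        "run_id": None,
        "error": None,
        "result": {"greeting": "hello John!"},
        "status": "completed",
        "started_at": "2025-09-23T22:00:26+00:00",
        "updated_at": None,
        "completed_at": "2025-09-23T22:00:27+00:00",
    }

    handler = HandlerData.model_validate(payload)
    assert handler.status == "completed"

    # Lists of handlers validate the same way
    listing = HandlersListResponse.model_validate({"handlers": [payload]})
    assert listing.handlers[0].handler_id == handler.handler_id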
diff --git a/src/workflows/server/representation_utils.py b/src/workflows/server/representation_utils.py
index 060a5a7..e126483 100644
--- a/src/workflows/server/representation_utils.py
+++ b/src/workflows/server/representation_utils.py
@@ -1,5 +1,5 @@
-from dataclasses import dataclass, asdict
-from typing import List, Optional, Any
+from dataclasses import dataclass
+from typing import List, Optional
 
 from workflows.events import (
     StopEvent,
@@ -7,6 +7,11 @@
     HumanResponseEvent,
 )
 from workflows.decorators import StepConfig
+from workflows.protocol import (
+    WorkflowGraphEdge,
+    WorkflowGraphNode,
+    WorkflowGraphNodeEdges,
+)
 from workflows.utils import (
     get_steps_from_class,
     get_steps_from_instance,
@@ -26,14 +31,14 @@ class DrawWorkflowNode:
         None  # Store the actual event type for styling decisions
     )
 
-    def to_dict(self) -> dict[str, Any]:
-        d = asdict(self)
-        ev_type = d.pop("event_type")
-        if ev_type:
-            d["event_type"] = ev_type.__name__
-        else:
-            d["event_type"] = ev_type
-        return d
+    def to_response_model(self) -> WorkflowGraphNode:
+        return WorkflowGraphNode(
+            id=self.id,
+            label=self.label,
+            node_type=self.node_type,
+            title=self.title,
+            event_type=self.event_type.__name__ if self.event_type else None,
+        )
 
 
 @dataclass
@@ -43,8 +48,11 @@ class DrawWorkflowEdge:
     source: str
     target: str
 
-    def to_dict(self) -> dict[str, str]:
-        return asdict(self)
+    def to_response_model(self) -> WorkflowGraphEdge:
+        return WorkflowGraphEdge(
+            source=self.source,
+            target=self.target,
+        )
 
 
 @dataclass
@@ -54,11 +62,11 @@ class DrawWorkflowGraph:
     nodes: List[DrawWorkflowNode]
     edges: List[DrawWorkflowEdge]
 
-    def to_dict(self) -> dict[str, list]:
-        return {
-            "nodes": [node.to_dict() for node in self.nodes],
-            "edges": [edge.to_dict() for edge in self.edges],
-        }
+    def to_response_model(self) -> WorkflowGraphNodeEdges:
+        return WorkflowGraphNodeEdges(
+            nodes=[node.to_response_model() for node in self.nodes],
+            edges=[edge.to_response_model() for edge in self.edges],
+        )
 
 
 def _truncate_label(label: str, max_length: int) -> str:
diff --git a/src/workflows/server/server.py b/src/workflows/server/server.py
index 172392d..94c4616 100644
--- a/src/workflows/server/server.py
+++ b/src/workflows/server/server.py
@@ -9,7 +9,7 @@
 import logging
 from importlib.metadata import version
 from pathlib import Path
-from typing import Any, AsyncGenerator, TypedDict
+from typing import Any, AsyncGenerator
 from datetime import datetime, timezone
 
 from pydantic import BaseModel
@@ -37,6 +37,16 @@
 from workflows.handler import WorkflowHandler
+from workflows.protocol import (
+    CancelHandlerResponse,
+    HandlerData,
+    HandlersListResponse,
+    HealthResponse,
+    SendEventResponse,
+    WorkflowEventsListResponse,
+    WorkflowGraphResponse,
+    WorkflowSchemaResponse,
+)
 from workflows.server.abstract_workflow_store import (
     AbstractWorkflowStore,
     EmptyWorkflowStore,
@@ -45,24 +55,14 @@
     Status,
 )
 from workflows.types import RunResultT
-from .utils import nanoid, serdes_event
+
+# Protocol models are used on the client side; server responds with plain dicts
+from .utils import nanoid
 from .representation_utils import _extract_workflow_structure
 
 logger = logging.getLogger()
 
 
-class HandlerDict(TypedDict):
-    handler_id: str
-    workflow_name: str
-    run_id: str | None  # run_id of the handler, easier for debugging
-    error: str | None
-    result: RunResultT | None
-    status: Status
-    started_at: str
-    updated_at: str | None
-    completed_at: str | None
-
-
 class WorkflowServer:
     def __init__(
         self,
@@ -335,7 +335,7 @@ async def _health_check(self, request: Request) -> JSONResponse:
                   example: healthy
             required: [status]
         """
-        return JSONResponse({"status": "healthy"})
+        return JSONResponse(HealthResponse(status="healthy").model_dump())
 
     async def _list_workflows(self, request: Request) -> JSONResponse:
         """
@@ -402,7 +402,7 @@ async def _list_workflow_events(self, request: Request) -> JSONResponse:
         for event in events:
             event_objs.append(event.model_json_schema())
 
-        return JSONResponse({"events": event_objs})
+        return JSONResponse(WorkflowEventsListResponse(events=event_objs).model_dump())
 
     async def _run_workflow(self, request: Request) -> JSONResponse:
         """
@@ -469,7 +469,7 @@ async def _run_workflow(self, request: Request) -> JSONResponse:
                 )
                 wrapper = self._run_workflow_handler(handler_id, workflow.name, handler)
                 await handler
-                return JSONResponse(wrapper.to_dict())
+                return JSONResponse(wrapper.to_response_model().model_dump())
             except Exception as e:
                 raise HTTPException(detail=f"Error running workflow: {e}", status_code=500)
 
@@ -522,7 +522,11 @@ async def _get_events_schema(self, request: Request) -> JSONResponse:
                 status_code=500,
             )
 
-        return JSONResponse({"start": start_event_schema, "stop": stop_event_schema})
+        return JSONResponse(
+            WorkflowSchemaResponse(
+                start=start_event_schema, stop=stop_event_schema
+            ).model_dump()
+        )
 
     async def _get_workflow_representation(self, request: Request) -> JSONResponse:
         """
@@ -563,8 +567,9 @@ async def _get_workflow_representation(self, request: Request) -> JSONResponse:
                 detail=f"Error while getting JSON workflow representation: {e}",
                 status_code=500,
             )
-
-        return JSONResponse({"graph": workflow_graph.to_dict()})
+        return JSONResponse(
+            WorkflowGraphResponse(graph=workflow_graph.to_response_model()).model_dump()
+        )
 
     async def _run_workflow_nowait(self, request: Request) -> JSONResponse:
         """
@@ -630,7 +635,7 @@ async def _run_workflow_nowait(self, request: Request) -> JSONResponse:
             workflow.name,
             handler,
         )
-        return JSONResponse(wrapper.to_dict())
+        return JSONResponse(wrapper.to_response_model().model_dump())
 
     async def _get_workflow_result(self, request: Request) -> JSONResponse:
         """
@@ -674,14 +679,15 @@ async def _get_workflow_result(self, request: Request) -> JSONResponse:
         handler = wrapper.run_handler
 
         if not handler.done():
-            resp = wrapper.to_dict()
-            return JSONResponse(resp, status_code=202)
+            return JSONResponse(
+                wrapper.to_response_model().model_dump(), status_code=202
+            )
 
         try:
             result = await handler
             self._results[handler_id] = result
 
-            return JSONResponse(wrapper.to_dict())
+            return JSONResponse(wrapper.to_response_model().model_dump())
         except Exception as e:
             raise HTTPException(
                 detail=f"Error getting workflow result: {e}", status_code=500
@@ -815,8 +821,8 @@ async def _get_handlers(self, request: Request) -> JSONResponse:
                     schema:
                       $ref: '#/components/schemas/HandlersList'
         """
-        items = [wrapper.to_dict() for wrapper in self._handlers.values()]
-        return JSONResponse({"handlers": items})
+        items = [wrapper.to_response_model() for wrapper in self._handlers.values()]
+        return JSONResponse(HandlersListResponse(handlers=items).model_dump())
 
     async def _post_event(self, request: Request) -> JSONResponse:
         """
@@ -927,7 +933,7 @@ async def _post_event(self, request: Request) -> JSONResponse:
                     detail=f"Failed to send event: {e}", status_code=400
                 )
 
-            return JSONResponse({"status": "sent"})
+            return JSONResponse(SendEventResponse(status="sent").model_dump())
 
         except HTTPException:
             raise
@@ -1010,7 +1016,11 @@ async def _cancel_handler(self, request: Request) -> JSONResponse:
                 )
             )
 
-        return JSONResponse({"status": "deleted" if purge else "cancelled"})
+        return JSONResponse(
+            CancelHandlerResponse(
+                status="deleted" if purge else "cancelled"
+            ).model_dump()
+        )
 
     #
     # Private methods
@@ -1038,8 +1048,13 @@ async def _extract_run_params(
         # Extract custom StartEvent if present
         start_event = None
         if start_event_data is not None:
+            serializer = JsonSerializer()
             try:
-                start_event = serdes_event(start_event_data, False)
+                start_event = (
+                    serializer.deserialize(start_event_data)
+                    if isinstance(start_event_data, str)
+                    else serializer.deserialize_value(start_event_data)
+                )
                 if isinstance(start_event, dict):
                     start_event = workflow.start_event_class.model_validate(
                         start_event
@@ -1225,8 +1240,8 @@ class _WorkflowHandler:
     updated_at: datetime
     completed_at: datetime | None
 
-    def to_dict(self) -> HandlerDict:
-        return HandlerDict(
+    def to_response_model(self) -> HandlerData:
+        return HandlerData(
             handler_id=self.handler_id,
             workflow_name=self.workflow_name,
             run_id=self.run_handler.run_id,
diff --git a/src/workflows/server/utils.py b/src/workflows/server/utils.py
index 57e7564..ff2beab 100644
--- a/src/workflows/server/utils.py
+++ b/src/workflows/server/utils.py
@@ -4,38 +4,9 @@
 import secrets
 import string
 
-from pydantic import BaseModel
-from typing import Union, Any
-from workflows.context.serializers import JsonSerializer
-
 alphabet = string.ascii_letters + string.digits  # A-Z, a-z, 0-9
 
 
 def nanoid(size: int = 10) -> str:
     """Returns a unique identifier with the format 'kY2xP9hTnQ'."""
     return "".join(secrets.choice(alphabet) for _ in range(size))
-
-
-def serdes_event(
-    event: Union[dict[str, Any], BaseModel, str], serialize: bool = True
-) -> Any:
-    """
-    Serialize or deserialize a start event.
-
-    Args:
-        event (Union[dict[str, Any], BaseModel, str]): Input event
-        serialize (bool): Serialize if true, deserialize if false.
-    """
-    serializer = JsonSerializer()
-    if serialize:
-        if isinstance(event, (BaseModel, dict)):
-            event = serializer.serialize(event)
-        else:
-            event = serializer.serialize_value(event)
-        return event
-    else:
-        if isinstance(event, str):
-            event = serializer.deserialize(event)
-        else:
-            event = serializer.deserialize_value(event)
-        return event
diff --git a/tests/client/test_client.py b/tests/client/test_client.py
index 83baaf0..aab1acb 100644
--- a/tests/client/test_client.py
+++ b/tests/client/test_client.py
@@ -13,47 +13,89 @@ def server() -> WorkflowServer:
     return ws
 
 
-@pytest.mark.asyncio
-async def test_client(server: WorkflowServer) -> None:
+@pytest.fixture()
+def client(server: WorkflowServer) -> WorkflowClient:
     transport = ASGITransport(server.app)
-    client = WorkflowClient(httpx_kwargs={"transport": transport})
+    return WorkflowClient(httpx_kwargs={"transport": transport})
+
+
+@pytest.mark.asyncio
+async def test_is_healthy(client: WorkflowClient) -> None:
     is_healthy = await client.is_healthy()
-    assert is_healthy
+    assert is_healthy.status == "healthy"
+
+
+@pytest.mark.asyncio
+async def test_list_workflows(client: WorkflowClient) -> None:
     wfs = await client.list_workflows()
-    assert isinstance(wfs, list)
-    assert len(wfs) == 1
-    assert wfs[0] == "greeting"
+    assert len(wfs.workflows) == 1
+    assert wfs.workflows[0] == "greeting"
+
+
+@pytest.mark.asyncio
+async def test_run_nowait_and_stream_events(client: WorkflowClient) -> None:
     handler = await client.run_workflow_nowait(
         "greeting", start_event=InputEvent(greeting="hello", name="John")
     )
-    assert isinstance(handler, dict)
-    handler_id = handler["handler_id"]
+    assert handler.handler_id
+    handler_id = handler.handler_id
+
     events = []
     async for event in client.get_workflow_events(handler_id=handler_id):
         assert isinstance(event, dict)
         events.append(event)
     assert len(events) == 3
+
+
+@pytest.mark.asyncio
+async def test_get_result_for_handler(client: WorkflowClient) -> None:
+    handler = await client.run_workflow_nowait(
+        "greeting", start_event=InputEvent(greeting="hello", name="John")
+    )
+    handler_id = handler.handler_id
+    # wait for completion
+    async for event in client.get_workflow_events(handler_id=handler_id):
+        pass
+
     result = await client.get_result(handler_id)
-    assert result is not None
-    res = OutputEvent.model_validate(result)
+    assert result.result is not None
+    res = OutputEvent.model_validate(result.result)
     assert "John" in res.greeting and "!" in res.greeting and "hello" in res.greeting
-    result = await client.get_result(handler_id, as_handler=True)
-    assert isinstance(result, dict)
-    assert result["handler_id"] == handler_id
+
+    # Result should be retrievable again and reference the same handler
+    result_again = await client.get_result(handler_id)
+    assert result_again.handler_id == handler_id
+
+
+@pytest.mark.asyncio
+async def test_get_handlers(client: WorkflowClient) -> None:
+    handler = await client.run_workflow_nowait(
+        "greeting", start_event=InputEvent(greeting="hello", name="John")
+    )
+    handler_id = handler.handler_id
+
     handlers = await client.get_handlers()
-    assert isinstance(handlers, list)
-    assert len(handlers) == 1
-    assert handlers[0] == result
+    assert len(handlers.handlers) == 1
+    assert handlers.handlers[0].handler_id == handler_id
+
+
+@pytest.mark.asyncio
+async def test_run_workflow_sync_result(client: WorkflowClient) -> None:
     result = await client.run_workflow(
         "greeting", start_event=InputEvent(greeting="hello", name="John")
     )
     assert result is not None
-    res = OutputEvent.model_validate(result)
+    res = OutputEvent.model_validate(result.result)
     assert "John" in res.greeting and "!" in res.greeting and "hello" in res.greeting
+
+
+@pytest.mark.asyncio
+async def test_stream_events_including_internal(client: WorkflowClient) -> None:
     handler = await client.run_workflow_nowait(
         "greeting", start_event=InputEvent(greeting="hello", name="John")
     )
-    handler_id = handler["handler_id"]
+    handler_id = handler.handler_id
+
     events = []
     async for event in client.get_workflow_events(
         handler_id=handler_id, include_internal_events=True
diff --git a/tests/server/test_handler_serialization.py b/tests/server/test_handler_serialization.py
index 34a5ea9..d8c3752 100644
--- a/tests/server/test_handler_serialization.py
+++ b/tests/server/test_handler_serialization.py
@@ -10,6 +10,7 @@
 from workflows.events import StopEvent, Event
 from workflows.handler import WorkflowHandler
+from workflows.protocol import HandlerData
 from workflows.server.server import _WorkflowHandler
 
 
@@ -44,10 +45,11 @@ async def noop() -> None:
         completed_at=now,
     )
 
-    d = wrapper.to_dict()
+    response_model = wrapper.to_response_model()
 
     # JSON serialization should not error
-    s = json.dumps(d)
-    reparsed = json.loads(s)
+    s = json.dumps(response_model.model_dump())
+    reparsed_dict = json.loads(s)
+    reparsed = HandlerData.model_validate(reparsed_dict)
 
     # Round-trip consistency
-    assert reparsed == d
+    assert reparsed == response_model
diff --git a/tests/server/test_utils.py b/tests/server/test_utils.py
index fed9635..bde9bab 100644
--- a/tests/server/test_utils.py
+++ b/tests/server/test_utils.py
@@ -1,10 +1,7 @@
 # SPDX-License-Identifier: MIT
 # Copyright (c) 2025 LlamaIndex Inc.
 
-import pytest
-from typing import Union
-from workflows.server.utils import nanoid, serdes_event
-from workflows.events import StartEvent
+from workflows.server.utils import nanoid
 
 
 def test_nanoid_default_length() -> None:
@@ -46,39 +43,3 @@ def test_nanoid_negative_length() -> None:
     result = nanoid(-10)
 
     assert result == ""
-
-
-def test_serdes_event_serialization() -> None:
-    event: Union[str, dict, StartEvent] = {"hello": "world"}
-    ser_event = serdes_event(event)
-    assert isinstance(ser_event, str)
-    assert ser_event == '{"hello": "world"}'
-    event = StartEvent(message="hello")  # type: ignore
-    ser_event = serdes_event(event)
-    assert isinstance(ser_event, str)
-    assert (
-        ser_event
-        == '{"__is_pydantic": true, "value": {"_data": {"message": "hello"}}, "qualified_name": "workflows.events.StartEvent"}'
-    )
-    event = '{"hello": "world"}'
-    ser_event = serdes_event(event)
-    assert isinstance(ser_event, str)
-    assert ser_event == '{"hello": "world"}'
-    event = {"type": str}
-    with pytest.raises(ValueError):
-        serdes_event(event)
-
-
-def test_serdes_event_deserialization() -> None:
-    event: Union[str, dict] = '{"hello": "world"}'
-    deser_event = serdes_event(event, serialize=False)
-    assert isinstance(deser_event, dict)
-    assert deser_event == {"hello": "world"}
-    event = '{"__is_pydantic": true, "value": {"_data": {"message": "hello"}}, "qualified_name": "workflows.events.StartEvent"}'
-    deser_event = serdes_event(event, serialize=False)
-    assert isinstance(deser_event, StartEvent)
-    assert deser_event == StartEvent(message="hello")  # type: ignore
-    event = {"hello": "world"}
-    ser_event = serdes_event(event, serialize=False)
-    assert isinstance(ser_event, dict)
-    assert ser_event == {"hello": "world"}
diff --git a/uv.lock b/uv.lock
index 77b4118..39e8a83 100644
--- a/uv.lock
+++ b/uv.lock
@@ -1,5 +1,5 @@
 version = 1
-revision = 3
+revision = 2
 requires-python = ">=3.9"
 resolution-markers = [
     "python_full_version >= '3.10'",
@@ -541,6 +541,9 @@ dependencies = [
 ]
 
 [package.optional-dependencies]
+client = [
+    { name = "httpx" },
+]
 server = [
     { name = "starlette" },
     { name = "uvicorn" },
@@ -562,13 +565,14 @@ dev = [
 
 [package.metadata]
 requires-dist = [
     { name = "eval-type-backport", marker = "python_full_version < '3.10'", specifier = ">=0.2.2" },
+    { name = "httpx", marker = "extra == 'client'", specifier = ">=0.28.1,<1" },
     { name = "llama-index-instrumentation", specifier = ">=0.1.0" },
     { name = "pydantic", specifier = ">=2.11.5" },
     { name = "starlette", marker = "extra == 'server'", specifier = ">=0.39.0" },
     { name = "typing-extensions", specifier = ">=4.6.0" },
     { name = "uvicorn", marker = "extra == 'server'", specifier = ">=0.32.0" },
 ]
-provides-extras = ["server"]
+provides-extras = ["server", "client"]
 
 [package.metadata.requires-dev]
 dev = [

From 8eb93f51470677ba193a16d285f464a291f3588a Mon Sep 17 00:00:00 2001
From: Adrian Lyjak
Date: Fri, 10 Oct 2025 16:20:40 -0400
Subject: [PATCH 13/13] make more configurable (#135)

---
 examples/client/base/workflow_client.py       |  2 +-
 .../human_in_the_loop/workflow_client_hitl.py |  2 +-
 src/workflows/client/client.py                | 50 ++++++++++++-------
 src/workflows/protocol/__init__.py            |  2 +-
 tests/client/test_client.py                   |  5 +-
 5 files changed, 37 insertions(+), 24 deletions(-)

diff --git a/examples/client/base/workflow_client.py b/examples/client/base/workflow_client.py
index 52ec85b..1574606 100644
--- a/examples/client/base/workflow_client.py
+++ b/examples/client/base/workflow_client.py
@@ -15,7 +15,7 @@ class InputNumbers(StartEvent):
 
 
 async def main() -> None:
-    client = WorkflowClient(protocol="http", host="localhost", port=8000)
+    client = WorkflowClient(base_url="http://localhost:8000")
     workflows = await client.list_workflows()
     print("===== AVAILABLE WORKFLOWS ====")
     print(workflows)
diff --git a/examples/client/human_in_the_loop/workflow_client_hitl.py b/examples/client/human_in_the_loop/workflow_client_hitl.py
index 8b64eca..686fb68 100644
--- a/examples/client/human_in_the_loop/workflow_client_hitl.py
+++ b/examples/client/human_in_the_loop/workflow_client_hitl.py
@@ -16,7 +16,7 @@ class OutEvent(StopEvent):
 
 
 async def main() -> None:
-    client = WorkflowClient(protocol="http", port=8000, host="localhost")
+    client = WorkflowClient(base_url="http://localhost:8000")
     handler = await client.run_workflow_nowait("human")
     handler_id = handler.handler_id
     print(handler_id)
diff --git a/src/workflows/client/client.py b/src/workflows/client/client.py
index 4eb7a7f..e82693d 100644
--- a/src/workflows/client/client.py
+++ b/src/workflows/client/client.py
@@ -1,7 +1,14 @@
 import httpx
 import json
 
-from typing import Literal, Any, Union, AsyncGenerator, AsyncIterator, Optional
+from typing import (
+    Any,
+    Union,
+    AsyncGenerator,
+    AsyncIterator,
+    Optional,
+    overload,
+)
 from contextlib import asynccontextmanager
 from workflows.context.serializers import JsonSerializer
 from workflows.events import StartEvent, Event
@@ -16,30 +23,35 @@
 class WorkflowClient:
+    @overload
+    def __init__(self, *, httpx_client: httpx.AsyncClient): ...
+    @overload
+    def __init__(
+        self,
+        *,
+        base_url: str,
+    ): ...
+
     def __init__(
         self,
-        protocol: Optional[Literal["http", "https"]] = None,
-        host: Optional[str] = None,
-        port: Optional[int] = None,
-        timeout: Optional[int] = None,
-        httpx_kwargs: Optional[dict[str, Any]] = None,
+        *,
+        httpx_client: Union[httpx.AsyncClient, None] = None,
+        base_url: Union[str, None] = None,
     ):
-        # TODO: middleware-related logic
-        self.protocol = protocol or "http"
-        self.host = host or "localhost"
-        self.port = port or 8000
-        self.timeout = timeout or 600
-        self.httpx_kwargs = httpx_kwargs or {}
-        # TODO: add some basic TLS/verification and auth features
+        if httpx_client is None and base_url is None:
+            raise ValueError("Either httpx_client or base_url must be provided")
+        if httpx_client is not None and base_url is not None:
+            raise ValueError("Only one of httpx_client or base_url must be provided")
+        self.httpx_client = httpx_client
+        self.base_url = base_url
 
     @asynccontextmanager
     async def _get_client(self) -> AsyncIterator[httpx.AsyncClient]:
-        async with httpx.AsyncClient(
-            base_url=self.protocol + "://" + self.host + ":" + str(self.port),
-            timeout=self.timeout,
-            **self.httpx_kwargs,
-        ) as client:
-            yield client
+        if self.httpx_client:
+            yield self.httpx_client
+        else:
+            async with httpx.AsyncClient(base_url=self.base_url or "") as client:
+                yield client
 
     async def is_healthy(self) -> HealthResponse:
         """
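Not part of the patch: a minimal sketch of the two construction modes introduced here. Exactly one of base_url or httpx_client may be given; passing both, or neither, makes __init__ raise ValueError. The in-memory ASGI variant mirrors the updated test fixture below; registering workflows on the server is assumed to happen elsewhere.

    import httpx

    from workflows.client import WorkflowClient
    from workflows.server import WorkflowServer

    # 1) Plain HTTP: the client opens a short-lived httpx.AsyncClient per request
    client = WorkflowClient(base_url="http://localhost:8000")

    # 2) Bring your own AsyncClient, e.g. to talk to an in-process ASGI app
    server = WorkflowServer()  # workflows assumed to be registered elsewhere
    transport = httpx.ASGITransport(server.app)
    in_memory = WorkflowClient(
        httpx_client=httpx.AsyncClient(transport=transport, base_url="http://test")
    )

    # Neither argument (or both) is rejected
    try:
        WorkflowClient()
    except ValueError as err:
        print(err)  # "Either httpx_client or base_url must be provided"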
diff --git a/src/workflows/protocol/__init__.py b/src/workflows/protocol/__init__.py
index f413c06..04219c2 100644
--- a/src/workflows/protocol/__init__.py
+++ b/src/workflows/protocol/__init__.py
@@ -52,7 +52,7 @@ class WorkflowEventsListResponse(BaseModel):
 
 
 class WorkflowGraphResponse(BaseModel):
-    graph: "WorkflowGraphNodeEdges"
+    graph: WorkflowGraphNodeEdges
 
 
 class WorkflowGraphNode(BaseModel):
diff --git a/tests/client/test_client.py b/tests/client/test_client.py
index aab1acb..a38c1d7 100644
--- a/tests/client/test_client.py
+++ b/tests/client/test_client.py
@@ -1,6 +1,6 @@
 import pytest
 
-from httpx import ASGITransport
+from httpx import ASGITransport, AsyncClient
 from workflows.server.server import WorkflowServer
 from workflows.client import WorkflowClient
 from .greeting_workflow import greeting_wf, InputEvent, OutputEvent
@@ -16,7 +16,8 @@ def server() -> WorkflowServer:
 @pytest.fixture()
 def client(server: WorkflowServer) -> WorkflowClient:
     transport = ASGITransport(server.app)
-    return WorkflowClient(httpx_kwargs={"transport": transport})
+    httpx_client = AsyncClient(transport=transport, base_url="http://test")
+    return WorkflowClient(httpx_client=httpx_client)
 
 
 @pytest.mark.asyncio