
Commit 1d3a6a1

dapr default better devex (#214)
* make devex a bit better when there is only one conversation component
* lint
* lint
* move import to lazy eval to avoid issues on test
* lint
* lint
* cleanup/refactor README.md for quickstart 01
* use edge in our check function

Signed-off-by: Filinto Duran <1373693+filintod@users.noreply.github.com>
1 parent 6870e8d commit 1d3a6a1

File tree

7 files changed (+180, -45 lines)

dapr_agents/agents/durableagent/agent.py

Lines changed: 0 additions & 4 deletions
@@ -120,10 +120,6 @@ def model_post_init(self, __context: Any) -> None:
 
         self.register_agentic_system()
 
-        # Start the runtime if it's not already running
-        logger.info("Starting workflow runtime...")
-        self.start_runtime()
-
     async def run(self, input_data: Union[str, Dict[str, Any]]) -> Any:
         """
         Fire up the workflow, wait for it to complete, then return the final serialized_output.

dapr_agents/llm/dapr/chat.py

Lines changed: 42 additions & 13 deletions
@@ -15,7 +15,6 @@
 )
 
 from pydantic import BaseModel, Field
-
 from dapr_agents.llm.chat import ChatClientBase
 from dapr_agents.llm.dapr.client import DaprInferenceClientBase
 from dapr_agents.llm.utils import RequestHandler, ResponseHandler

@@ -80,6 +79,8 @@ class DaprChatClient(DaprInferenceClientBase, ChatClientBase):
 
     component_name: Optional[str] = None
 
+    component_name: Optional[str] = None
+
     # Only function_call–style structured output is supported
     SUPPORTED_STRUCTURED_MODES: ClassVar[set[str]] = {"function_call"}
 

@@ -92,8 +93,8 @@ def model_post_init(self, __context: Any) -> None:
         if not self._llm_component:
             self._llm_component = os.environ.get("DAPR_LLM_COMPONENT_DEFAULT")
         if not self._llm_component:
-            raise ValueError(
-                "You must provide a component_name or set DAPR_LLM_COMPONENT_DEFAULT in the environment."
+            logger.debug(
+                "No LLM component provided and no default component found in the environment. Will try to get it from the metadata at runtime."
             )
         super().model_post_init(__context)
 

@@ -327,16 +328,11 @@ def generate(
             )
         # get metadata information from the dapr client
         metadata = self.client.dapr_client.get_metadata()
-        extended_metadata = metadata.extended_metadata
-        dapr_runtime_version = extended_metadata.get("daprRuntimeVersion", None)
-        if dapr_runtime_version is not None:
-            # Allow only versions >=1.16.0 and <2.0.0 for Alpha2 Chat Client
-            if not is_version_supported(
-                str(dapr_runtime_version), ">=1.16.0, edge, <2.0.0"
-            ):
-                raise DaprRuntimeVersionNotSupportedError(
-                    f"!!!!! Dapr Runtime Version {dapr_runtime_version} is not supported with Alpha2 Dapr Chat Client. Only Dapr runtime versions >=1.16.0, edge,and <2.0.0 are supported."
-                )
+        _check_dapr_runtime_support(metadata)
+
+        llm_component = llm_component or self._llm_component
+        if not llm_component:
+            llm_component = _get_llm_component(metadata)
 
         raw = self.client.chat_completion_alpha2(
             llm=llm_component or self._llm_component,

@@ -365,3 +361,36 @@ def generate(
             structured_mode=structured_mode,
             stream=False,
         )
+
+
+def _check_dapr_runtime_support(metadata: "GetMetadataResponse"):  # noqa: F821
+    """Check if the Dapr runtime version is supported for Alpha2 Chat Client."""
+    extended_metadata = metadata.extended_metadata
+    dapr_runtime_version = extended_metadata.get("daprRuntimeVersion", None)
+    if dapr_runtime_version is not None:
+        # Allow only versions >=1.16.0, edge, and <2.0.0 for Alpha2 Chat Client
+        if not is_version_supported(
+            str(dapr_runtime_version), ">=1.16.0, edge, <2.0.0"
+        ):
+            raise DaprRuntimeVersionNotSupportedError(
+                f"!!!!! Dapr Runtime Version {dapr_runtime_version} is not supported with Alpha2 Dapr Chat Client. Only Dapr runtime versions >=1.16.0, edge, and <2.0.0 are supported."
+            )
+
+
+def _get_llm_component(metadata: "GetMetadataResponse") -> str:  # noqa: F821
+    """Get the LLM component from the metadata."""
+    conversation_components = [
+        component
+        for component in metadata.registered_components
+        if component.type.startswith("conversation.")
+    ]
+    if len(conversation_components) == 1:
+        return conversation_components[0].name
+    elif len(conversation_components) > 1:
+        raise ValueError(
+            "Multiple LLM components found in the metadata. Please provide the component name explicitly (e.g. llm = DaprChatClient(component_name='openai')) or environment variable DAPR_LLM_COMPONENT_DEFAULT."
+        )
+    else:
+        raise ValueError(
+            "No LLM component provided and no default component found in the metadata."
+        )
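
The new `_get_llm_component` helper is what enables the zero-configuration path: when neither `component_name` nor `DAPR_LLM_COMPONENT_DEFAULT` is set, `generate` inspects the sidecar metadata and uses the only registered `conversation.*` component. Below is a minimal standalone sketch of that selection rule; the component names and the plain tuple list standing in for the metadata response are hypothetical, not part of this commit.

```python
from typing import List, Tuple


def pick_conversation_component(components: List[Tuple[str, str]]) -> str:
    """Mirror the selection rule: exactly one conversation.* component wins."""
    candidates = [name for name, ctype in components if ctype.startswith("conversation.")]
    if len(candidates) == 1:
        return candidates[0]
    if candidates:
        raise ValueError("Multiple conversation components; pass component_name explicitly.")
    raise ValueError("No conversation component registered with the sidecar.")


# Hypothetical sidecar contents: one conversation component plus a state store.
print(pick_conversation_component([("openai", "conversation.openai"), ("statestore", "state.redis")]))
# -> "openai"
```

Raising on multiple matches, rather than silently picking the first one, keeps the behaviour deterministic and pushes the ambiguity back to explicit configuration.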

dapr_agents/workflow/base.py

Lines changed: 4 additions & 0 deletions
@@ -3,6 +3,7 @@
 import inspect
 import json
 import logging
+import time
 import sys
 import uuid
 from datetime import datetime, timezone

@@ -516,6 +517,9 @@ def start_runtime(self):
         self.wf_runtime.start()
         self.wf_runtime_is_running = True
 
+        logger.info("Sleeping for 5 seconds to ensure runtime is started.")
+        time.sleep(5)
+
         # Sync database state with Dapr workflow status after runtime starts
         # This ensures our database reflects the actual state of resumed workflows
         self._sync_workflow_state_after_startup()

quickstarts/01-hello-world/03_durable_agent.py

Lines changed: 13 additions & 6 deletions
@@ -9,9 +9,10 @@
 import asyncio
 import logging
 
+
 from typing import List
 from pydantic import BaseModel, Field
-from dapr_agents import tool, DurableAgent, DaprChatClient
+from dapr_agents import tool, DurableAgent
 from dapr_agents.memory import ConversationDaprStateMemory
 from dotenv import load_dotenv
 

@@ -38,11 +39,17 @@ def search_flights(destination: str) -> List[FlightOption]:
     ]
 
 
-# one can use the environment variable to set the default component name
+# ----------------------------------------------------------------------------------------------------------------------
+# There are three ways to set the LLM component with DaprChatClient:
+#
+# 1. Explicitly instantiate the DaprChatClient with the component name
+#    llm = DaprChatClient(component_name="openai")
+#
+# 2. Use the environment variable DAPR_LLM_COMPONENT_DEFAULT
 # os.environ.setdefault("DAPR_LLM_COMPONENT_DEFAULT", "openai")
-
-# or directly pass the component name
-llm = DaprChatClient(component_name="openai")
+#
+# 3. If there is only one conversation component in the resources folder, it will be used by default
+# ----------------------------------------------------------------------------------------------------------------------
 
 
 async def main():

@@ -66,7 +73,7 @@ async def main():
         memory=ConversationDaprStateMemory(
             store_name="conversationstore", session_id="my-unique-id"
         ),
-        llm=llm,
+        # llm=llm,  # if you don't set the llm attribute, it defaults to DaprChatClient()
     )
 
     await travel_planner.run("I want to find flights to Paris")
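
Put together, the three options from the comment block look roughly like this; a sketch assuming a running Dapr sidecar that exposes an `openai` conversation component, showing only the LLM setup rather than the full quickstart agent.

```python
import os

from dapr_agents import DaprChatClient

# Option 1: pass the component name explicitly.
llm = DaprChatClient(component_name="openai")

# Option 2: set the environment default and construct the client without arguments.
os.environ["DAPR_LLM_COMPONENT_DEFAULT"] = "openai"
llm = DaprChatClient()

# Option 3: no component name and no environment variable. With exactly one
# conversation component in the resources folder, the component is resolved from
# the sidecar metadata at request time. DurableAgent also defaults its llm
# attribute to DaprChatClient(), so the quickstart can omit llm= entirely.
llm = DaprChatClient()
```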

quickstarts/01-hello-world/README.md

Lines changed: 47 additions & 16 deletions
@@ -4,15 +4,25 @@ This quickstart provides a hands-on introduction to Dapr Agents through simple e
 
 ## Prerequisites
 
-- Python 3.10 (recommended)
-- pip package manager
+- Python 3.10+ (recommended)
+- [uv package manager](https://docs.astral.sh/uv/getting-started/installation/)
 - OpenAI API key (you can put in an .env file or directly in the `openai.yaml` file, but we recommend the .env file that is gitignored)
 
 ## Environment Setup
 
 <details open>
 <summary><strong>Option 1: Using uv (Recommended)</strong></summary>
 
+<!-- We include setting up the venv as part of the first step to make sure the venv is created and activated before the examples are run. -->
+
+<!-- STEP
+name: Run basic LLM example
+expected_stdout_lines:
+  - "Got response:"
+timeout_seconds: 30
+output_match_mode: substring
+-->
+
 ```bash
 # Create and activate virtual environment
 uv venv .venv

@@ -27,7 +37,7 @@ uv pip install -r requirements.txt
 <details>
 <summary><strong>Option 2: Using pip</strong></summary>
 
-```bash
+```a shell [not setting type to avoid mechanical markdown execution]
 # Create a virtual environment
 python3.10 -m venv .venv
 

@@ -39,17 +49,19 @@ source .venv/bin/activate
 
 # Install dependencies
 pip install -r requirements.txt
-
 ```
 
 </details>
 
 
-## Configuration
+## OpenAI API Key
 
 > **Warning**
 > The examples will not work if you do not have a OpenAI API key exported in the environment.
 
+<details open>
+<summary><strong>Option 1: Using .env file</strong></summary>
+
 Create a `.env` file in the project root and add your OpenAI API key:
 
 ```env

@@ -59,23 +71,32 @@ OPENAI_API_KEY=your_api_key_here
 Replace `your_api_key_here` with your actual OpenAI API key.
 
 Export the environment variables from the .env file to your shell:
-```bash
-export $(grep -v '^#' .env | xargs) # or if .env is in the root directory, you can just run `export $(grep -v '^#' ../../.env | xargs)`
+```a shell [not setting type to avoid mechanical markdown execution]
+export $(grep -v '^#' .env | xargs)
+
+# or if .env is in the root directory of the repository,
+# export $(grep -v '^#' ../../.env | xargs)
+```
+
+</details>
+
+<details>
+<summary><strong>Option 2: Exporting the OpenAI API Key directly to the shell</strong></summary>
+
+```a shell [not setting type to avoid mechanical markdown execution]
+export OPENAI_API_KEY=your_api_key_here
 ```
 
+Replace `your_api_key_here` with your actual OpenAI API key.
+
+</details>
+
 ## Examples
 
 ### 1. Basic LLM Usage
 
 Run the basic LLM example to see how to interact with OpenAI's language models:
 
-<!-- STEP
-name: Run basic LLM example
-expected_stdout_lines:
-  - "Got response:"
-timeout_seconds: 30
-output_match_mode: substring
--->
 ```bash
 python 01_ask_llm.py
 ```

@@ -167,6 +188,9 @@ A stateful agent that uses Dapr Workflows to ensure durability and persistence o
 We are using the Dapr ChatClient to interact with the OpenAI API. In the components folder, we have a `openai.yaml` file that contains the configuration for the OpenAI API.
 You need to replace the `{YOUR_OPENAI_API_KEY}` with your actual OpenAI API key.
 
+We are using the Dapr ChatClient to interact with the OpenAI API. In the components folder, we have a `openai.yaml` file that contains the configuration for the OpenAI API.
+You need to replace the `{YOUR_OPENAI_API_KEY}` with your actual OpenAI API key.
+
 Make sure Dapr is initialized on your system:
 
 ```bash

@@ -180,14 +204,16 @@ name: Run basic LLM example
 expected_stdout_lines:
   - "I want to find flights to Paris"
   - "TravelBuddy"
-timeout_seconds: 30
+timeout_seconds: 60
 output_match_mode: substring
 -->
 
 
-We are using the `resolve_env_templates.py` script to resolve the environment variables in the components folder and substitute them with the actual values in your .env file, like the OpenAI API key.
+We are using the `resolve_env_templates.py` script to resolve the environment variables in the components folder and substitute them with the actual values in your environment, like the OpenAI API key.
 
 ```bash
+source .venv/bin/activate
+
 dapr run --app-id stateful-llm --dapr-http-port 3500 --resources-path $(../resolve_env_templates.py ./components) -- python 03_durable_agent.py
 ```
 

@@ -321,6 +347,8 @@ expected_stdout_lines:
 output_match_mode: substring
 -->
 ```bash
+source .venv/bin/activate
+
 dapr run --app-id dapr-agent-wf --resources-path $(../resolve_env_templates.py ./components) -- python 04_chain_tasks.py
 ```
 <!-- END_STEP -->

@@ -402,6 +430,8 @@ expected_stdout_lines:
 output_match_mode: substring
 -->
 ```bash
+source .venv/bin/activate
+
 python 05_agent_with_vectorstore.py
 ```
 <!-- END_STEP -->

@@ -541,6 +571,7 @@ if __name__ == "__main__":
 
 ## Key Concepts
 
+- **DaprChatClient**: The interface for interacting with Dapr's LLMs
 - **OpenAIChatClient**: The interface for interacting with OpenAI's LLMs
 - **Agent**: A class that combines an LLM with tools and instructions
 - **@tool decorator**: A way to create tools that agents can use
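
Every `dapr run` example in the README now goes through `resolve_env_templates.py`, which substitutes placeholders in the component YAML with values from your environment and prints a temporary resources path for `--resources-path`. The script itself is not part of this diff; the sketch below only illustrates the substitution idea, and the `${VAR}` placeholder syntax and file layout are assumptions.

```python
import os
import re
import sys
import tempfile
from pathlib import Path

# Illustrative only: copy component YAML files to a temp folder, replacing
# ${VAR}-style placeholders with values from the environment, and print the
# temp path so it can be passed to --resources-path. The real script may differ.
PLACEHOLDER = re.compile(r"\$\{(\w+)\}")


def resolve_templates(components_dir: str) -> str:
    out_dir = Path(tempfile.mkdtemp(prefix="resolved-components-"))
    for yaml_file in Path(components_dir).glob("*.yaml"):
        text = yaml_file.read_text()
        resolved = PLACEHOLDER.sub(lambda m: os.environ.get(m.group(1), m.group(0)), text)
        (out_dir / yaml_file.name).write_text(resolved)
    return str(out_dir)


if __name__ == "__main__":
    print(resolve_templates(sys.argv[1] if len(sys.argv) > 1 else "./components"))
```

Printing the resolved path is presumably what lets the README use the script inline as `--resources-path $(../resolve_env_templates.py ./components)`.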
