Skip to content

Commit

Permalink
Merge remote-tracking branch 'origin' into feat/story-telling
Browse files — browse the repository at this point in the history
  • Branch information
Committed by plutoless on Jan 6, 2025
2 parents e70473e + d28ff8b commit 550ecd6
Show file tree
Hide file tree
Showing 20 changed files with 38 additions and 35 deletions.
2 changes: 1 addition & 1 deletion .devcontainer/devcontainer.json
Original file line number Diff line number Diff line change
Expand Up @@ -2,7 +2,7 @@
// README at: https://github.com/devcontainers/templates/tree/main/src/docker-existing-dockerfile
{
"name": "ten_agent_dev",
"image": "ghcr.io/ten-framework/ten_agent_build:0.3.3",
"image": "ghcr.io/ten-framework/ten_agent_build:0.3.4",
"customizations": {
"vscode": {
"extensions": [
Expand Down
2 changes: 1 addition & 1 deletion Dockerfile
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
FROM ghcr.io/ten-framework/ten_agent_build:0.3.3 AS builder
FROM ghcr.io/ten-framework/ten_agent_build:0.3.4 AS builder

ARG SESSION_CONTROL_CONF=session_control.conf

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -13,7 +13,7 @@
Cmd,
)
from ten.async_ten_env import AsyncTenEnv
from ten_ai_base.helper import get_properties_string
from ten_ai_base.config import BaseConfig
from ten_ai_base import AsyncLLMToolBaseExtension
from ten_ai_base.types import LLMToolMetadata, LLMToolMetadataParameter, LLMToolResult

Expand Down Expand Up @@ -61,13 +61,15 @@
# 2. https://learn.microsoft.com/en-us/bing/search-apis/bing-custom-search/overview
# 3. https://azure.microsoft.com/en-in/updates/bing-search-apis-will-transition-from-azure-cognitive-services-to-azure-marketplace-on-31-october-2023/

class BingSearchToolConfig(BaseConfig):
api_key: str = ""

class BingSearchToolExtension(AsyncLLMToolBaseExtension):

def __init__(self, name: str) -> None:
super().__init__(name)
self.api_key = None
self.session = None
self.config = None
self.k = 10

async def on_init(self, ten_env: AsyncTenEnv) -> None:
Expand All @@ -79,10 +81,9 @@ async def on_start(self, ten_env: AsyncTenEnv) -> None:
ten_env.log_debug("on_start")
await super().on_start(ten_env)

get_properties_string(
ten_env, [PROPERTY_API_KEY], lambda name, value: setattr(self, name, value)
)
if not self.api_key:
self.config = await BingSearchToolConfig.create_async(ten_env=ten_env)

if not self.config.api_key:
ten_env.log_info("API key is missing, exiting on_start")
return

Expand Down Expand Up @@ -146,7 +147,7 @@ async def _initialize_session(self, ten_env: AsyncTenEnv):

async def _bing_search_results(self, ten_env: AsyncTenEnv, search_term: str, count: int) -> List[dict]:
await self._initialize_session(ten_env)
headers = {"Ocp-Apim-Subscription-Key": self.api_key}
headers = {"Ocp-Apim-Subscription-Key": self.config.api_key}
params = {
"q": search_term,
"count": count,
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -22,7 +22,7 @@ def on_start(self, ten_env: TenEnvTester) -> None:
print("send hello_world")
ten_env.send_cmd(
new_cmd,
lambda ten_env, result: self.check_hello(ten_env, result),
lambda ten_env, result, _: self.check_hello(ten_env, result),
)

print("tester on_start_done")
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -22,7 +22,7 @@ def on_start(self, ten_env: TenEnvTester) -> None:
print("send hello_world")
ten_env.send_cmd(
new_cmd,
lambda ten_env, result: self.check_hello(ten_env, result),
lambda ten_env, result, _: self.check_hello(ten_env, result),
)

print("tester on_start_done")
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -22,7 +22,7 @@ def on_start(self, ten_env: TenEnvTester) -> None:
print("send hello_world")
ten_env.send_cmd(
new_cmd,
lambda ten_env, result: self.check_hello(ten_env, result),
lambda ten_env, result, _: self.check_hello(ten_env, result),
)

print("tester on_start_done")
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -22,7 +22,7 @@ def on_start(self, ten_env: TenEnvTester) -> None:
print("send hello_world")
ten_env.send_cmd(
new_cmd,
lambda ten_env, result: self.check_hello(ten_env, result),
lambda ten_env, result,_: self.check_hello(ten_env, result),
)

print("tester on_start_done")
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -84,7 +84,7 @@ def create_collection(self, ten: TenEnv, collection_name: str, wait: bool):
wait_event = threading.Event()
ten.send_cmd(
cmd_out,
lambda ten, result: wait_event.set(),
lambda ten, result, _: wait_event.set(),
)
if wait:
wait_event.wait()
Expand All @@ -97,7 +97,7 @@ def embedding(self, ten: TenEnv, path: str, texts: List[str]):
cmd_out = Cmd.create("embed_batch")
cmd_out.set_property_from_json("inputs", json.dumps(texts))
ten.send_cmd(
cmd_out, lambda ten, result: self.vector_store(ten, path, texts, result)
cmd_out, lambda ten, result, _: self.vector_store(ten, path, texts, result)
)

def vector_store(self, ten: TenEnv, path: str, texts: List[str], result: CmdResult):
Expand All @@ -114,7 +114,7 @@ def vector_store(self, ten: TenEnv, path: str, texts: List[str], result: CmdResu
content.append({"text": text, "embedding": embedding})
cmd_out.set_property_string("content", json.dumps(content))
# ten.log_info(json.dumps(content))
ten.send_cmd(cmd_out, lambda ten, result: self.file_chunked(ten, path))
ten.send_cmd(cmd_out, lambda ten, result, _: self.file_chunked(ten, path))

def file_chunked(self, ten: TenEnv, path: str):
if path in self.counters and path in self.expected:
Expand All @@ -137,7 +137,7 @@ def file_chunked(self, ten: TenEnv, path: str):
cmd_out.set_property_string("collection", self.new_collection_name)
ten.send_cmd(
cmd_out,
lambda ten, result: ten.log_info("send_cmd done"),
lambda ten, result, _: ten.log_info("send_cmd done"),
)
self.file_chunked_event.set()
else:
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -22,7 +22,7 @@ def on_start(self, ten_env: TenEnvTester) -> None:
print("send hello_world")
ten_env.send_cmd(
new_cmd,
lambda ten_env, result: self.check_hello(ten_env, result),
lambda ten_env, result, _: self.check_hello(ten_env, result),
)

print("tester on_start_done")
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -25,7 +25,7 @@ def do_POST(self):
self.ten.log_info("incoming request %s", input_file)
self.ten.send_cmd(
Cmd.create_from_json(input_file),
lambda ten, result: ten.log_info(
lambda ten, result, _: ten.log_info(
"finish send_cmd from http server %s %s", input_file, result
),
)
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -52,7 +52,7 @@ def on_cmd(self, ten: TenEnv, cmd: Cmd) -> None:
new_cmd.set_property_from_json(None, cmd_json)
ten.send_cmd(
new_cmd,
lambda ten, result: ten.log_info("send_cmd done"),
lambda ten, result, _: ten.log_info("send_cmd done"),
)

cmd_result = CmdResult.create(StatusCode.OK)
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -39,7 +39,7 @@ def _get_query_embedding(self, query: str) -> List[float]:
wait_event = threading.Event()
resp: List[float]

def callback(_, result):
def callback(_, result, __):
nonlocal resp
nonlocal wait_event

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -59,7 +59,7 @@ def chat(self, messages: Sequence[ChatMessage], **kwargs: Any) -> ChatResponse:
resp: ChatResponse
wait_event = threading.Event()

def callback(_, result):
def callback(_, result, __):
self.ten.log_debug("LlamaLLM chat callback done")
nonlocal resp
nonlocal wait_event
Expand All @@ -71,7 +71,9 @@ def callback(_, result):
cmd = Cmd.create("call_chat")
cmd.set_property_string("messages", messages_str)
cmd.set_property_bool("stream", False)
self.ten.log_info(f"LlamaLLM chat send_cmd {cmd.get_name()}, messages {messages_str}")
self.ten.log_info(
f"LlamaLLM chat send_cmd {cmd.get_name()}, messages {messages_str}"
)

self.ten.send_cmd(cmd, callback)
wait_event.wait()
Expand Down Expand Up @@ -103,7 +105,7 @@ def gen() -> ChatResponseGen:
delta=delta_text,
)

def callback(_, result):
def callback(_, result, __):
nonlocal cur_tokens
nonlocal resp_queue

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -63,7 +63,7 @@ def _retrieve(self, query_bundle: QueryBundle) -> List[NodeWithScore]:
wait_event = threading.Event()
resp: List[NodeWithScore] = []

def cmd_callback(_, result):
def cmd_callback(_, result, __):
nonlocal resp
nonlocal wait_event
resp = format_node_result(self.ten, result)
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -215,8 +215,8 @@ async def _handle_text_data(self, data: dict):
json_bytes = json.dumps(data).encode("utf-8")
cmd = Cmd.create("publish")
cmd.set_property_buf("message", json_bytes)
[result, _] = await self.ten_env.send_cmd(cmd)
self.ten_env.log_info(f"send_cmd result {result.to_json()}")
[cmd_result, _] = await self.ten_env.send_cmd(cmd)
self.ten_env.log_info(f"send_cmd result {cmd_result.to_json()}")
except Exception as e:
self.ten_env.log_error(f"Failed to handle text data: {e}")

Expand All @@ -225,7 +225,7 @@ async def _handle_user_state(self, data: dict):
json_bytes = json.dumps(data)
cmd = Cmd.create("set_presence_state")
cmd.set_property_string("states", json_bytes)
[result, _] = await self.ten_env.send_cmd(cmd)
self.ten_env.log_info(f"send_cmd result {result.to_json()}")
[cmd_result, _] = await self.ten_env.send_cmd(cmd)
self.ten_env.log_info(f"send_cmd result {cmd_result.to_json()}")
except Exception as e:
self.ten_env.log_error(f"Failed to handle user state: {e}")
Original file line number Diff line number Diff line change
Expand Up @@ -22,7 +22,7 @@ def on_start(self, ten_env: TenEnvTester) -> None:
print("send hello_world")
ten_env.send_cmd(
new_cmd,
lambda ten_env, result: self.check_hello(ten_env, result),
lambda ten_env, result, _: self.check_hello(ten_env, result),
)

print("tester on_start_done")
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -269,7 +269,7 @@ def on_cmd(self, ten: TenEnv, cmd: Cmd) -> None:
cmd_out = Cmd.create("flush")
ten.send_cmd(
cmd_out,
lambda ten, result: ten.log_info("send_cmd flush done"),
lambda ten, result, _: ten.log_info("send_cmd flush done"),
)
elif cmd_name == "call_chat":
self.queue.put((cmd, ts))
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -22,7 +22,7 @@ def on_start(self, ten_env: TenEnvTester) -> None:
print("send hello_world")
ten_env.send_cmd(
new_cmd,
lambda ten_env, result: self.check_hello(ten_env, result),
lambda ten_env, result, _: self.check_hello(ten_env, result),
)

print("tester on_start_done")
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -22,7 +22,7 @@ def on_start(self, ten_env: TenEnvTester) -> None:
print("send hello_world")
ten_env.send_cmd(
new_cmd,
lambda ten_env, result: self.check_hello(ten_env, result),
lambda ten_env, result, _: self.check_hello(ten_env, result),
)

print("tester on_start_done")
Expand Down
4 changes: 2 additions & 2 deletions docker-compose.yml
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
services:
ten_agent_dev:
image: ghcr.io/ten-framework/ten_agent_build:0.3.3
image: ghcr.io/ten-framework/ten_agent_build:0.3.4
container_name: ten_agent_dev
platform: linux/amd64
tty: true
Expand Down Expand Up @@ -39,7 +39,7 @@ services:
environment:
- AGENT_SERVER_URL=http://ten_agent_dev:8080
# ten_graph_designer:
# image: ghcr.io/ten-framework/ten_graph_designer:dde0ff1
# image: ghcr.io/ten-framework/ten_graph_designer:4cc33b8
# container_name: ten_graph_designer
# restart: always
# ports:
Expand Down

0 comments on commit 550ecd6

Please sign in to comment.