Commit 6db6d3c

Fix python ai template (#341)
Minor fixes

#### PR Dependency Tree

* **PR #341** 👈

This tree was auto-generated by [Charcoal](https://github.com/danerwilliams/charcoal)
1 parent ce5b946 commit 6db6d3c

File tree (1 file changed)

  • packages/cli/templates/python/ai/src

Lines changed: 9 additions & 15 deletions
@@ -1,21 +1,17 @@
-import os
 import asyncio
+import os
 
 from dotenv import load_dotenv
-from microsoft.teams.apps import App, AppOptions
 from microsoft.teams.ai import ChatPrompt
+from microsoft.teams.apps import App
 from microsoft.teams.devtools import DevToolsPlugin
 from microsoft.teams.openai import OpenAICompletionsAIModel
 
 # Load variables from a .env file (if present) so os.environ.get() will pick them up.
 # Developers should copy `sample.env` -> `.env` to provide local values.
 load_dotenv()
 
-app_options = AppOptions(
-    plugins=[DevToolsPlugin()]
-)
-
-app = App(app_options)
+app = App(plugins=[DevToolsPlugin()])
 
 # Configure OpenAI model and key from environment (default model: gpt-4o)
 MODEL_NAME = os.environ.get("OPENAI_MODEL", "gpt-4o")
@@ -25,16 +21,14 @@
     key=OPENAI_KEY,
 )
 
+
 @app.on_message
-async def handle_message(event):
+async def handle_message(ctx):
     prompt = ChatPrompt(model=model)
-    await prompt.send(
-        event.activity.text,
-        on_chunk=lambda chunk: event.stream.emit(chunk))
+    result = await prompt.send(ctx.activity.text)
+    if result.response.content:
+        await ctx.send(result.response.content)
 
-async def main():
-    port = int(os.environ.get("PORT", "3978"))
-    await app.start(port)
 
 if __name__ == "__main__":
-    asyncio.run(main())
+    asyncio.run(app.start())
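
For anyone applying the change by hand, here is a sketch of the template module as it should read after this commit, stitched together from the two hunks above. The diff elides the lines between the hunks (roughly original lines 22–24), so that part is an assumption: the `OPENAI_KEY` lookup (env var name assumed to be `OPENAI_API_KEY`) and the exact shape of the `OpenAICompletionsAIModel(...)` construction with a `model=` keyword are inferred from the surrounding context, not confirmed by the diff.

```python
import asyncio
import os

from dotenv import load_dotenv
from microsoft.teams.ai import ChatPrompt
from microsoft.teams.apps import App
from microsoft.teams.devtools import DevToolsPlugin
from microsoft.teams.openai import OpenAICompletionsAIModel

# Load variables from a .env file (if present) so os.environ.get() will pick them up.
# Developers should copy `sample.env` -> `.env` to provide local values.
load_dotenv()

app = App(plugins=[DevToolsPlugin()])

# Configure OpenAI model and key from environment (default model: gpt-4o)
MODEL_NAME = os.environ.get("OPENAI_MODEL", "gpt-4o")

# Assumed reconstruction: these lines are hidden between the two diff hunks.
# The env var name and keyword arguments are inferred, not shown in the diff.
OPENAI_KEY = os.environ.get("OPENAI_API_KEY")
model = OpenAICompletionsAIModel(
    model=MODEL_NAME,
    key=OPENAI_KEY,
)


@app.on_message
async def handle_message(ctx):
    # Send the incoming message text to the model and reply with its response.
    prompt = ChatPrompt(model=model)
    result = await prompt.send(ctx.activity.text)
    if result.response.content:
        await ctx.send(result.response.content)


if __name__ == "__main__":
    asyncio.run(app.start())
```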
