run_assistant_stream.py

  1. """
  2. test for stream api
  3. """
import logging

from openai import AssistantEventHandler
from openai.types.beta import AssistantStreamEvent
from openai.types.beta.assistant_stream_event import ThreadMessageInProgress
from openai.types.beta.threads.message import Message
from openai.types.beta.threads.runs import ToolCall, ToolCallDelta

from examples.prerun import client
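
# `examples.prerun` is assumed to expose a pre-configured OpenAI client; a minimal
# sketch of what it might look like (hypothetical, not part of this file):
#
#     from openai import OpenAI
#
#     client = OpenAI()  # reads OPENAI_API_KEY from the environment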


class EventHandler(AssistantEventHandler):
    """Logs every streaming callback emitted while an Assistants run executes."""

    def __init__(self) -> None:
        super().__init__()

    # --- tool call lifecycle ---
    def on_tool_call_created(self, tool_call: ToolCall) -> None:
        logging.info("=====> tool call created: %s\n", tool_call)

    def on_tool_call_delta(self, delta: ToolCallDelta, snapshot: ToolCall) -> None:
        logging.info("=====> tool call delta")
        logging.info("delta   : %s", delta)
        logging.info("snapshot: %s\n", snapshot)

    def on_tool_call_done(self, tool_call: ToolCall) -> None:
        logging.info("=====> tool call done: %s\n", tool_call)
        # keep the final tool call around for later inspection
        self.tool_call = tool_call

    # --- message lifecycle ---
    def on_message_created(self, message: Message) -> None:
        logging.info("=====> message created: %s\n", message)

    def on_message_delta(self, delta, snapshot: Message) -> None:
        logging.info("=====> message delta")
        logging.info("=====> delta   : %s", delta)
        logging.info("=====> snapshot: %s\n", snapshot)

    def on_message_done(self, message: Message) -> None:
        logging.info("=====> message done: %s\n", message)

    # --- text lifecycle ---
    def on_text_created(self, text) -> None:
        logging.info("=====> text created: %s\n", text)

    def on_text_delta(self, delta, snapshot) -> None:
        logging.info("=====> text delta")
        logging.info("delta   : %s", delta)
        logging.info("snapshot: %s\n", snapshot)

    def on_text_done(self, text) -> None:
        logging.info("text done: %s\n", text)

    # --- raw stream events ---
    def on_event(self, event: AssistantStreamEvent) -> None:
        if isinstance(event, ThreadMessageInProgress):
            logging.info("event: %s\n", event)


if __name__ == "__main__":
    # without this, the INFO-level logs above are suppressed by the default WARNING level
    logging.basicConfig(level=logging.INFO)

    assistant = client.beta.assistants.create(
        name="Assistant Demo",
        instructions="You are a personal assistant. Reply 'hello' to the user.",
        model="gpt-3.5-turbo-1106",
    )
    logging.info("=====> assistant: %s\n", assistant)

    thread = client.beta.threads.create()
    logging.info("=====> thread: %s\n", thread)

    message = client.beta.threads.messages.create(
        thread_id=thread.id,
        role="user",
        content="hello",
    )
    logging.info("=====> message: %s\n", message)

    # stream the run; the handler's callbacks fire as events arrive
    event_handler = EventHandler()
    with client.beta.threads.runs.create_and_stream(
        thread_id=thread.id,
        assistant_id=assistant.id,
        event_handler=event_handler,
        extra_body={"stream_options": {"include_usage": True}},
    ) as stream:
        stream.until_done()
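
# Note: more recent openai-python releases deprecate `create_and_stream` in favour
# of `client.beta.threads.runs.stream(...)`. A minimal sketch of the iterator-based
# style on a newer SDK (an alternative to the handler above, not verified against
# the version pinned here):
#
#     with client.beta.threads.runs.stream(
#         thread_id=thread.id,
#         assistant_id=assistant.id,
#     ) as stream:
#         for event in stream:
#             logging.info("event: %s", event)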