@@ -0,0 +1,417 @@
+# # import os
+# # import re
+# # from subprocess import check_output
+
+# # def update_requirements():
+# #     """Update requirements.txt so that every package is pinned to its latest version."""
+# #     with open('requirements.txt', 'r') as file:
+# #         packages = file.readlines()
+
+# #     # Fetch the latest versions of the packages
+# #     latest_packages = check_output(
+# #         ['pip', 'install', '--upgrade', '--quiet', '--no-deps', '-I'] +
+# #         [package.strip() for package in packages if package.strip()],
+# #         text=True
+# #     ).splitlines()
+
+# #     # Parse the latest version information
+# #     latest_packages_dict = dict(re.findall(
+# #         r'^(\S*)\s+\((\S*)\)\s+-.*$',
+# #         line,
+# #         re.MULTILINE
+# #     ) for line in latest_packages if line.startswith(('Installing', 'Upgrading')))
+
+# #     # Rewrite the requirements.txt file
+# #     with open('requirements.txt', 'w') as file:
+# #         for package in packages:
+# #             match = re.match(r'^(\S*)\s+.*$', package.strip())
+# #             if match and match.group(1) in latest_packages_dict:
+# #                 file.write(f'{match.group(1)}=={latest_packages_dict[match.group(1)]}\n')
+# #             else:
+# #                 file.write(package.strip() + os.linesep)
+
+# # if __name__ == '__main__':
+# #     update_requirements()
+
+
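+# Commented-out scratch code: audits File API storage usage by purpose and deletes oversized uploads.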
+# import api_key, os, openai
+
+# openai.api_key = api_key.openai_key
+# # openai.api_key = "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"
+# # openai.api_key = "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"
+
+# # # Your OpenAI API key
+# a = os.environ["OPENAI_API_KEY"] = api_key.openai_key
+
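+# Page through the "assistants" files (five pages), collecting every item and the size of each page.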
+# list_new_pdf = []
+# totals = []
+# after = None
+
+# for _ in range(5):
+#     extra_query = {"order": "asc"}
+#     if after is not None:
+#         extra_query["after"] = after
+#     list = openai.files.list(purpose="assistants", extra_query=extra_query)
+
+#     bytes = 0
+#     for item in list.data:
+#         list_new_pdf.append(item)
+#         bytes = bytes + item.bytes
+#     totals.append(bytes / 1024 / 1024 / 1024)
+
+#     print("assistants:")
+#     print(len(list.data))
+#     print(str(totals[-1]) + "G")
+#     print('')
+
+#     after = list.data[-1].id
+
+# # print(str(bytes/1024/1024/1024) + "G")
+# print(len(list_new_pdf))
+# print(sum(totals))
+
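+# Break the collected files down by extension; delete .ppt/.pptx uploads of 50 MB or more.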
+# # for item in list_new_pdf:
+# #     print(item)
+
+# list_new_pdf_new = []
+# bytes = 0
+# for item in list_new_pdf:
+#     if ".ppt" in item.filename or ".pptx" in item.filename:
+#         bytes = bytes + item.bytes
+#         list_new_pdf_new.append(item)
+
+# print(str(bytes/1024/1024/1024) + "G")
+# print(len(list_new_pdf_new))
+# # print(list_new_pdf_new[3])
+# pdf = []
+
+# for item in list_new_pdf_new:
+#     if item.bytes >= 52428800:
+#         pdf.append(item)
+
+# bytes = 0
+# for item in pdf:
+#     print(item)
+#     bytes = bytes + item.bytes
+
+# print(str(bytes/1024/1024/1024) + "G")
+# print(len(pdf))
+
+
+# for item in pdf:
+#     # if item.id in list_new_pdf:
+#     openai.files.delete(item.id)
+
+
+# list_new_pdf_new = []
+# bytes = 0
+# for item in list_new_pdf:
+#     if ".doc" in item.filename or ".docx" in item.filename:
+#         bytes = bytes + item.bytes
+#         list_new_pdf_new.append(item)
+
+# print(str(bytes/1024/1024/1024) + "G")
+# print(len(list_new_pdf_new))
+
+# list_new_pdf_new = []
+# bytes = 0
+# for item in list_new_pdf:
+#     if ".xls" in item.filename or ".xlsx" in item.filename:
+#         bytes = bytes + item.bytes
+#         list_new_pdf_new.append(item)
+
+# print(str(bytes/1024/1024/1024) + "G")
+# print(len(list_new_pdf_new))
+
+# list_new_pdf = []
+# bytes = 0
+# for item in list.data:
+#     if ".xlsx" in item.filename:
+#         bytes = bytes + item.bytes
+#         list_new_pdf.append(item)
+
+# print(str(bytes/1024/1024/1024) + "G")
+# print(len(list_new_pdf))
+
+# list = []
+# bytes = 0
+# for i in range(16):
+#     bytes = bytes + list_new[i].bytes
+#     print(list_new[i])
+#     print('')
+#     list.append(list_new[i])
+
+# print(list)
+
+# for item in list_new_pdf:
+#     if item.id == "file-1Mbqss6qX6GPnTKM6izCx1":
+#         print(item)
+
+# deleteList = ["file-9uYPz7MWTpjgsUhond2pdFVc","file-U7oU81MT34NTeHaZu6dv2yhq","file-nUyGEdScnyLCuRZ3a2vLCY6B","file-Uy8H9ePoGspsXyTwWUwwhAlK"]
+
+# for item in list_new_pdf:
+#     # if item.id in list_new_pdf:
+#     openai.files.delete(item.id)
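+
+# Report the file count and total size for each of the remaining purposes.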
+# list = openai.files.list(
+#     purpose="assistants_output",
+#     extra_query={"order":"desc"}
+# )
+# # print(list)
+# bytes = 0
+# for item in list.data:
+#     if item.bytes is not None:
+#         bytes = bytes + item.bytes
+# print("assistants_output:")
+# print(len(list.data))
+# print(str(bytes/1024/1024/1024) + "G")
+# print('')
+
+# list = openai.files.list(
+#     purpose="batch",
+#     extra_query={"order":"desc"}
+# )
+# bytes = 0
+# for item in list.data:
+#     bytes = bytes + item.bytes
+# print("batch:")
+# print(len(list.data))
+# print(str(bytes/1024/1024/1024) + "G")
+# print('')
+
+# list = openai.files.list(
+#     purpose="batch_output",
+#     extra_query={"order":"desc"}
+# )
+# print("batch_output:")
+# print(len(list.data))
+# print('')
+
+# list = openai.files.list(
+#     purpose="fine-tune",
+#     extra_query={"order":"desc"}
+# )
+# bytes = 0
+# for item in list.data:
+#     bytes = bytes + item.bytes
+# print("fine-tune:")
+# print(len(list.data))
+# print(str(bytes/1024/1024/1024) + "G")
+# print('')
+
+# list = openai.files.list(
+#     purpose="fine-tune-results",
+#     extra_query={"order":"desc"}
+# )
+# print("fine-tune-results:")
+# print(len(list.data))
+# print('')
+
+# list = openai.files.list(
+#     purpose="vision",
+#     extra_query={"order":"desc"}
+# )
+# bytes = 0
+# for item in list.data:
+#     bytes = bytes + item.bytes
+# print("vision:")
+# print(len(list.data))
+# print(str(bytes/1024/1024/1024) + "G")
+
+
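+# Demo: upload a local file, create a file_search assistant over it, and stream a run on a new thread.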
+import logging
+import os
+from pathlib import Path
+import time
+import openai
+from openai import AssistantEventHandler
+from openai.types.beta import AssistantStreamEvent
+from openai.types.beta.assistant_stream_event import ThreadMessageInProgress
+from openai.types.beta.threads.message import Message
+from openai.types.beta.threads.runs import ToolCall, ToolCallDelta
+
+# Emit the EventHandler's logging.info() output; without this the stream callbacks are silent.
+logging.basicConfig(level=logging.INFO)
+
+base_url = "https://assistantapi.cocorobo.cn/api/v1"
+api_key = "cocorobo-xjw-admin"
+client = openai.OpenAI(base_url=base_url, api_key=api_key)
+
+
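+# Event handler that logs every streaming callback so the sequence of run events can be inspected.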
+class EventHandler(AssistantEventHandler):
+    def __init__(self) -> None:
+        super().__init__()
+
+    def on_tool_call_created(self, tool_call: ToolCall) -> None:
+        logging.info("=====> tool call created: %s\n", tool_call)
+
+    def on_tool_call_delta(self, delta: ToolCallDelta, snapshot: ToolCall) -> None:
+        logging.info("=====> tool call delta")
+        logging.info("delta : %s", delta)
+        logging.info("snapshot: %s\n", snapshot)
+
+    def on_tool_call_done(self, tool_call: ToolCall) -> None:
+        logging.info("=====> tool call done: %s\n", tool_call)
+        self.tool_call = tool_call
+
+    def on_message_created(self, message: Message) -> None:
+        logging.info("=====> message created: %s\n", message)
+
+    def on_message_delta(self, delta, snapshot: Message) -> None:
+        logging.info("=====> message delta")
+        logging.info("=====> delta : %s", delta)
+        logging.info("=====> snapshot: %s\n", snapshot)
+
+    def on_message_done(self, message: Message) -> None:
+        logging.info("=====> message done: %s\n", message)
+
+    def on_text_created(self, text) -> None:
+        logging.info("=====> text create: %s\n", text)
+
+    def on_text_delta(self, delta, snapshot) -> None:
+        logging.info("=====> text delta")
+        logging.info("delta : %s", delta)
+        logging.info("snapshot: %s\n", snapshot)
+
+    def on_text_done(self, text) -> None:
+        logging.info("text done: %s\n", text)
+
+    def on_event(self, event: AssistantStreamEvent) -> None:
+        if isinstance(event, ThreadMessageInProgress):
+            logging.info("event: %s\n", event)
+
+
+if __name__ == "__main__":
+
+    # Upload the local test file for use with the Assistants API
+    file_path = os.path.join(os.path.dirname(__file__), "test.txt")
+    print(file_path)
+    file = client.files.create(file=Path(file_path), purpose="assistants")
+    print(file)
+
+    # Create an assistant with a vector store built from the uploaded file
+    assistant = client.beta.assistants.create(
+        name="Assistant Demo",
+        instructions="Meeting analyst",
+        model="gpt-4o-2024-11-20",
+        tools=[
+            {"type": "file_search"},
+        ],
+        tool_resources={"file_search": {"vector_stores": [{"file_ids": [file.id]}]}},
+    )
+    # assistant = client.beta.assistants.retrieve(assistant_id="67614b38d5f1a0df9dddfba9")
+    print(f"=====> assistant: {assistant}\n")
+
+    thread = client.beta.threads.create()
+    print(f"=====> thread: {thread}\n")
+
+    message = client.beta.threads.messages.create(
+        thread_id=thread.id,
+        role="user",
+        content="Core content of artificial intelligence",
+        # attachments=[
+        #     {"file_id": "67614b375e4b953d7f07c27a", "tools": [{"type": "file_search"}]}
+        # ]
+    )
+    print(f"=====> message: {message}\n")
+
+    # Stream the run; EventHandler logs each event as it arrives
+    event_handler = EventHandler()
+    with client.beta.threads.runs.stream(
+        thread_id=thread.id,
+        assistant_id=assistant.id,
+        event_handler=event_handler,
+        extra_body={"stream_options": {"include_usage": True}},
+    ) as stream:
+        stream.until_done()
+    print("stream finished")
+
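+    # Alternative to streaming: create the run, then poll its status until it completes or fails.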
+    # run = client.beta.threads.runs.create(
+    #     thread_id=thread.id,
+    #     assistant_id=assistant.id,
+    # )
+    # print("=====> : %s\n", run)
+
+    # print("checking assistant status. \n")
+    # while True:
+    #     run = client.beta.threads.runs.retrieve(thread_id=thread.id, run_id=run.id)
+    #     run_steps = client.beta.threads.runs.steps.list(run_id=run.id, thread_id=thread.id).data
+    #     for run_step in run_steps:
+    #         print("=====> : %s\n", run_step)
+
+    #     if run.status == "completed":
+    #         messages = client.beta.threads.messages.list(thread_id=thread.id)
+
+    #         print("=====> messages:")
+    #         for message in messages:
+    #             assert message.content[0].type == "text"
+    #             print("%s", {"role": message.role, "message": message.content[0].text.value})
+
+    #         # delete asst
+    #         client.beta.assistants.delete(assistant.id)
+    #         break
+    #     elif run.status == "failed":
+    #         print("run failed %s\n", run.last_error)
+    #         break
+    #     else:
+    #         print("in progress...\n")
+    #         time.sleep(5)