jack committed 3 weeks ago
Commit
552218ccef
1 file changed, 15 insertions, 13 deletions

app/core/runner/thread_runner.py  +15 -13

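What the hunk below changes: the token lookup is now wrapped in a try/except, and an empty or missing token_id, or any lookup failure (which is only printed), falls through to a hard-coded default backend instead of raising. A minimal sketch of that fallback pattern, assuming a standalone build_backend helper and a simplified LLMBackend stand-in (both illustrative names, not part of the repository), with the hard-coded API key replaced by a placeholder:

from typing import Optional

class LLMBackend:
    # Simplified stand-in for the project's LLMBackend wrapper.
    def __init__(self, base_url: str, api_key: str):
        self.base_url = base_url
        self.api_key = api_key

# Default connection used whenever the per-token lookup cannot be completed.
DEFAULT_TOKEN = {
    "llm_base_url": "https://onehub.cocorobo.cn/v1",
    "llm_api_key": "<default-api-key>",  # placeholder for the key embedded in the diff
}

def build_backend(session, token_id: Optional[str], token_service) -> LLMBackend:
    # Prefer the per-token configuration; any failure is only logged and
    # execution falls through to the default backend below.
    try:
        if token_id is not None and len(token_id) > 0:
            token = token_service.get_token_by_id(session, token_id)
            return LLMBackend(base_url=token.llm_base_url, api_key=token.llm_api_key)
    except Exception as e:
        print(e)
    return LLMBackend(
        base_url=DEFAULT_TOKEN.get("llm_base_url"),
        api_key=DEFAULT_TOKEN.get("llm_api_key"),
    )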
@@ -328,19 +328,21 @@ class ThreadRunner:
             )
             print(self.token_id)
             print(token_id)
-            if token_id is not None:
-                token = TokenService.get_token_by_id(self.session, token_id)
-                return LLMBackend(
-                    base_url=token.llm_base_url, api_key=token.llm_api_key
-                )
-            else:
-                token = {
-                    "llm_base_url": "https://onehub.cocorobo.cn/v1",
-                    "llm_api_key": "sk-vTqeBKDC2j6osbGt89A2202dAd1c4fE8B1D294388b569e54",
-                }
-                return LLMBackend(
-                    base_url=token.get("llm_base_url"), api_key=token.get("llm_api_key")
-                )
+            try:
+                if token_id is not None and len(token_id) > 0:
+                    token = TokenService.get_token_by_id(self.session, token_id)
+                    return LLMBackend(
+                        base_url=token.llm_base_url, api_key=token.llm_api_key
+                    )
+            except Exception as e:
+                print(e)
+            token = {
+                "llm_base_url": "https://onehub.cocorobo.cn/v1",
+                "llm_api_key": "sk-vTqeBKDC2j6osbGt89A2202dAd1c4fE8B1D294388b569e54",
+            }
+            return LLMBackend(
+                base_url=token.get("llm_base_url"), api_key=token.get("llm_api_key")
+            )
         else:
             # init llm backend with llm settings
             return LLMBackend(