@@ -328,19 +328,21 @@ class ThreadRunner:
             )
             print(self.token_id)
             print(token_id)
-            if token_id is not None:
-                token = TokenService.get_token_by_id(self.session, token_id)
-                return LLMBackend(
-                    base_url=token.llm_base_url, api_key=token.llm_api_key
-                )
-            else:
-                token = {
-                    "llm_base_url": "https://onehub.cocorobo.cn/v1",
-                    "llm_api_key": "sk-vTqeBKDC2j6osbGt89A2202dAd1c4fE8B1D294388b569e54",
-                }
-                return LLMBackend(
-                    base_url=token.get("llm_base_url"), api_key=token.get("llm_api_key")
-                )
+            try:
+                if token_id is not None and len(token_id) > 0:
+                    token = TokenService.get_token_by_id(self.session, token_id)
+                    return LLMBackend(
+                        base_url=token.llm_base_url, api_key=token.llm_api_key
+                    )
+            except Exception as e:
+                print(e)
+            token = {
+                "llm_base_url": "https://onehub.cocorobo.cn/v1",
+                "llm_api_key": "sk-vTqeBKDC2j6osbGt89A2202dAd1c4fE8B1D294388b569e54",
+            }
+            return LLMBackend(
+                base_url=token.get("llm_base_url"), api_key=token.get("llm_api_key")
+            )
         else:
             # init llm backend with llm settings
             return LLMBackend(
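
Read as a whole, the hunk changes the fallback behavior: any exception raised while resolving the token, as well as a missing or empty `token_id`, now falls through to the hardcoded default credentials, whereas the old code only fell back when `token_id` was `None`. A minimal sketch of the resulting control flow is below; the method name `_resolve_llm_backend` is illustrative, `TokenService`, `LLMBackend`, `self.session`, and `token_id` come from the surrounding code, and the fallback block is assumed to sit at the same indentation level as the `try` statement.

```python
# Sketch of the control flow introduced by the hunk above (not the verbatim
# method). `TokenService`, `LLMBackend`, and `self.session` come from the
# surrounding code; `_resolve_llm_backend` is a hypothetical name.
def _resolve_llm_backend(self, token_id):
    try:
        # Happy path: a non-empty token_id that resolves via the database.
        if token_id is not None and len(token_id) > 0:
            token = TokenService.get_token_by_id(self.session, token_id)
            return LLMBackend(base_url=token.llm_base_url, api_key=token.llm_api_key)
    except Exception as e:
        # Lookup errors are only printed, not re-raised.
        print(e)
    # Fallback: reached when token_id is None/empty or the lookup failed,
    # using the hardcoded base URL and API key shown in the diff.
    token = {
        "llm_base_url": "https://onehub.cocorobo.cn/v1",
        "llm_api_key": "<hardcoded key from the diff>",
    }
    return LLMBackend(base_url=token.get("llm_base_url"), api_key=token.get("llm_api_key"))
```

A consequence of this design worth noting: because the `except` clause catches `Exception` broadly and only prints it, a misconfigured or unresolvable `token_id` silently degrades to the shared default credentials rather than surfacing an error.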