123456789101112131415161718192021222324252627282930313233343536 |
- import os
- # from langchain_community.chat_models import ChatOpenAI
- from langchain_openai import ChatOpenAI
- from langchain_community.llms import Tongyi
- from langchain_community.chat_models.tongyi import ChatTongyi
class LLM:
    """Factory/wrapper that builds a chat LLM client by model name.

    Supported names: 'deepseek' (a ChatOpenAI client pointed at the
    DeepSeek endpoint) and 'qwen' (a ChatTongyi / DashScope client).
    Any other name raises ValueError.
    """

    def __init__(self, model_name='qwen'):
        """Build and store the underlying client for *model_name*.

        Args:
            model_name: 'deepseek' or 'qwen' (default 'qwen').

        Raises:
            ValueError: if *model_name* is not one of the supported names.
        """
        if model_name == 'deepseek':
            self.llm = self.deepseek_llm()
        elif model_name == 'qwen':
            self.llm = self.qwen_llm()
        else:
            raise ValueError("Unsupported model name")

    def deepseek_llm(self):
        """Return a ChatOpenAI client configured for the DeepSeek API."""
        # SECURITY FIX: the API key was hard-coded in source. Prefer the
        # DEEPSEEK_API_KEY environment variable; the old literal remains
        # only as a backward-compatible fallback — rotate that key and
        # remove the fallback as soon as callers are migrated.
        api_key = os.environ.get(
            'DEEPSEEK_API_KEY', 'sk-7a15d12a3e254dd0a3408f1544c72da5'
        )
        return ChatOpenAI(
            model='deepseek-coder',
            openai_api_key=api_key,
            openai_api_base='https://api.deepseek.com',
            max_tokens=1024,
        )

    def qwen_llm(self):
        """Return a ChatTongyi (DashScope) client for the qwen-turbo model."""
        # BUG FIX: the original unconditionally overwrote DASHSCOPE_API_KEY,
        # clobbering any key the user had already configured. setdefault only
        # injects the (hard-coded, to-be-rotated) fallback when unset.
        os.environ.setdefault(
            "DASHSCOPE_API_KEY", "sk-86d4622141d74e9a8d7c38ee873c4d91"
        )
        return ChatTongyi(model='qwen-turbo')

    def get_llm(self):
        """Return the client instance built in __init__."""
        return self.llm
class QwenTurboTongyi(Tongyi):
    """Tongyi subclass that pins the model selection.

    BUG FIX: the original declared ``mode_name`` (typo), a field the Tongyi
    base class never reads, so the override was silently dead and the base
    default model was used. The field Tongyi actually consumes is
    ``model_name``.

    NOTE(review): the value 'qwen-plus' contradicts the class name
    (QwenTurbo...) — confirm which model was intended before relying on it.
    """
    model_name = 'qwen-plus'
|