Source code for towhee.runtime.hub_ops.llm

# Copyright 2023 Zilliz. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

from typing import Any
from towhee.runtime.factory import HubOp

# pylint: disable=invalid-name
class LLM:
    """
    The LLM ops provide a single standard interface to all of the supported
    large language model services and locally hosted models.
    """

    OpenAI: HubOp = HubOp('LLM.OpenAI')
    """
    This operator is implemented with the Chat Completions API from
    `OpenAI <https://platform.openai.com/docs/guides/gpt/chat-completions-api>`_.
    Please note you need an OpenAI API key to access OpenAI.

    __init__(self, model_name: str = 'gpt-3.5-turbo', api_key: str = None, **kwargs)
        model_name(`str`):
            The model name in string, defaults to 'gpt-3.5-turbo'.
        api_key(`str`):
            The OpenAI API key in string, defaults to None.
        kwargs(`dict`):
            Other OpenAI parameters such as max_tokens, stream, temperature, etc.

    __call__(self, messages: List[Dict]) -> str
        messages(`List[Dict]`):
            A list of messages to set up the chat. Must be a list of
            dictionaries whose keys are drawn from "system", "question" and
            "answer". For example,
            [{"question": "a past question?", "answer": "a past answer."},
            {"question": "current question?"}]

        Returns:
            The next answer generated by role "assistant".

    Example:

    .. code-block:: python

        from towhee import pipe, ops

        p = (
            pipe.input('messages')
                .map('messages', 'answer', ops.LLM.OpenAI(api_key=OPENAI_API_KEY))
                .output('messages', 'answer')
        )

        messages=[
            {'question': 'Who won the world series in 2020?',
             'answer': 'The Los Angeles Dodgers won the World Series in 2020.'},
            {'question': 'Where was it played?'}
        ]
        answer = p(messages).get()[0]
    """

    Ernie: HubOp = HubOp('LLM.Ernie')
    """
    This operator is implemented with Ernie Bot (文心一言) from Baidu. Please
    note you will need an Ernie `API key & Secret key
    <https://ai.baidu.com/ai-doc/REFERENCE/Lkru0zoz4>`_ to access the service.

    __init__(self, api_key: str = None, secret_key: str = None, **kwargs)
        api_key(`str`):
            The Ernie API key in string, defaults to None. If None, it will
            use the environment variable ERNIE_API_KEY.
        secret_key(`str`):
            The Ernie Secret key in string, defaults to None. If None, it will
            use the environment variable ERNIE_SECRET_KEY.
        kwargs(`Dict`):
            Other Ernie parameters such as temperature, etc.

    __call__(self, messages: List[Dict]) -> str
        messages(`List[Dict]`):
            A list of messages to set up the chat. Must be a list of
            dictionaries whose keys are drawn from "system", "question" and
            "answer". For example,
            [{"question": "a past question?", "answer": "a past answer."},
            {"question": "current question?"}]

        Returns:
            The next answer generated by role "assistant".

    Example:

    .. code-block:: python

        from towhee import pipe, ops

        p = (
            pipe.input('messages')
                .map('messages', 'answer',
                     ops.LLM.Ernie(api_key=ERNIE_API_KEY,
                                   secret_key=ERNIE_SECRET_KEY,
                                   temperature=0.5))
                .output('answer')
        )

        messages=[
            {'question': 'Zilliz Cloud 是什么?',
             'answer': 'Zilliz Cloud 是一种全托管的向量检索服务。'},
            {'question': '它和 Milvus 的关系是什么?'}
        ]
        answer = p(messages).get()[0]
    """
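    # All of the LLM ops share the same messages-in, answer-out interface, so
    # a pipeline can swap providers without any other change. A minimal sketch,
    # assuming OPENAI_API_KEY, ERNIE_API_KEY and ERNIE_SECRET_KEY are defined
    # by the caller (they are not part of this module):
    #
    #     from towhee import pipe, ops
    #
    #     def qa_pipeline(llm_op):
    #         # The LLM op is the only provider-specific piece of the pipeline.
    #         return (
    #             pipe.input('messages')
    #                 .map('messages', 'answer', llm_op)
    #                 .output('answer')
    #         )
    #
    #     p_openai = qa_pipeline(ops.LLM.OpenAI(api_key=OPENAI_API_KEY))
    #     p_ernie = qa_pipeline(ops.LLM.Ernie(api_key=ERNIE_API_KEY,
    #                                         secret_key=ERNIE_SECRET_KEY))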
    MiniMax: HubOp = HubOp('LLM.MiniMax')
    """
    This operator is implemented with the large language model service from
    `MiniMax <https://api.minimax.chat/>`_. Please note you will need a MiniMax
    API key & Group ID to access the service.

    __init__(self, api_key: str = None, group_id: str = None, model: str = 'abab5-chat', **kwargs)
        api_key(`str`):
            The MiniMax API key in string, defaults to None. If None, it will
            use the environment variable MINIMAX_API_KEY.
        group_id(`str`):
            The MiniMax group id in string, defaults to None. If None, it will
            use the environment variable MINIMAX_GROUP_ID.
        model(`str`):
            The model used in the MiniMax service, defaults to 'abab5-chat'.
            Visit the MiniMax documentation for supported models.
        kwargs(`Dict`):
            Other MiniMax parameters such as temperature, etc.

    __call__(self, messages: List[Dict]) -> str
        messages(`List[Dict]`):
            A list of messages to set up the chat. Must be a list of
            dictionaries whose keys are drawn from "system", "question" and
            "answer". For example,
            [{"question": "a past question?", "answer": "a past answer."},
            {"question": "current question?"}]. It also accepts the original
            MiniMax message format like
            [{"sender_type": "USER", "text": "a question?"},
            {"sender_type": "BOT", "text": "an answer."}]

        Returns:
            The next answer generated by role "BOT".

    Example:

    .. code-block:: python

        from towhee import pipe, ops

        p = (
            pipe.input('messages')
                .map('messages', 'answer',
                     ops.LLM.MiniMax(
                         api_key=MINIMAX_API_KEY,
                         group_id=MINIMAX_GROUP_ID,
                         temperature=0.5,
                         max_tokens=50,
                         role_meta={
                             'user_name': '我',
                             'bot_name': '工程师'
                         },
                     ))
                .output('answer')
        )

        messages=[
            {'system': '你是一个资深的软件工程师,善于回答关于科技项目的问题。'},
            {'question': 'Zilliz Cloud 是什么?',
             'answer': 'Zilliz Cloud 是一种全托管的向量检索服务。'},
            {'question': '它和 Milvus 的关系是什么?'}
        ]
        answer = p(messages).get()[0]
    """

    DashScope: HubOp = HubOp('LLM.DashScope')
    """
    This operator is implemented with Tongyi Qianwen (通义千问) from Alibaba's
    `DashScope <https://dashscope.aliyun.com/>`_ model service. Please note you
    will need a DashScope API key to access the service.

    __init__(self, api_key: str = None, model: str = 'qwen-v1', **kwargs)
        api_key(`str`):
            The DashScope API key in string, defaults to None. If None, it
            will use the environment variable DASHSCOPE_API_KEY.
        model(`str`):
            The model used in the DashScope service, defaults to 'qwen-v1'.
            Visit the `DashScope documentation
            <https://help.aliyun.com/document_detail/613695.html?spm=a2c4g.610268.0.0.1aea6cf0ZkEtCM#BQnl3>`_
            for supported models.
        kwargs(`Dict`):
            Other DashScope model parameters such as temperature, etc.

    __call__(self, messages: List[Dict]) -> str
        messages(`List[Dict]`):
            A list of messages to set up the chat. Must be a list of
            dictionaries whose keys are drawn from "system", "question" and
            "answer". For example,
            [{"question": "a past question?", "answer": "a past answer."},
            {"question": "current question?"}]. It also accepts the original
            DashScope message format like
            [{"user": "a past question?", "bot": "a past answer."},
            {"user": "current question?"}]

    Example:

    .. code-block:: python

        from towhee import pipe, ops

        p = (
            pipe.input('messages')
                .map('messages', 'answer',
                     ops.LLM.DashScope(
                         api_key=DASHSCOPE_API_KEY,
                         temperature=0.5,
                     ))
                .output('answer')
        )

        messages=[
            {'question': 'Zilliz Cloud 是什么?',
             'answer': 'Zilliz Cloud 是一种全托管的向量检索服务。'},
            {'question': '它和 Milvus 的关系是什么?'}
        ]
        answer = p(messages).get()[0]
    """
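    # Ops such as MiniMax and DashScope accept both the unified
    # "system"/"question"/"answer" message format and the provider-native one,
    # so existing chat history can be passed in whichever form is at hand.
    # A minimal sketch of two equivalent histories for DashScope:
    #
    #     unified = [
    #         {'question': 'a past question?', 'answer': 'a past answer.'},
    #         {'question': 'current question?'},
    #     ]
    #     native = [
    #         {'user': 'a past question?', 'bot': 'a past answer.'},
    #         {'user': 'current question?'},
    #     ]
    #     # Either list can be fed to a pipeline built on ops.LLM.DashScope.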
    SkyChat: HubOp = HubOp('LLM.SkyChat')
    """
    This operator is implemented with SkyChat (天工) from `Singularity AI
    <https://openapi.singularity-ai.com/index.html#/>`_. Please note you will
    need the SkyChat app key & app secret to access the service.

    __init__(self, app_key: str = None, app_secret: str = None, api_host: str = 'sky-api.singularity-ai.com', model: str = 'sky-chat-3.5', **kwargs)
        app_key(`str`):
            The SkyChat app key in string, defaults to None. If None, it will
            use the environment variable SKYCHAT_APP_KEY.
        app_secret(`str`):
            The SkyChat app secret in string, defaults to None. If None, it
            will use the environment variable SKYCHAT_APP_SECRET.
        api_host(`str`):
            The API host, defaults to 'sky-api.singularity-ai.com'.
        model(`str`):
            The SkyChat model name, defaults to 'sky-chat-3.5'.
        kwargs(`Dict`):
            Other SkyChat parameters such as temperature, etc.

    __call__(self, messages: List[Dict]) -> str
        messages(`List[Dict]`):
            A list of messages to set up the chat. Must be a list of
            dictionaries whose keys are drawn from "question" and "answer".
            For example,
            [{"question": "a past question?", "answer": "a past answer."},
            {"question": "current question?"}]. It also accepts the original
            SkyChat message format like
            [{"role": "user", "content": "a question?"},
            {"role": "bot", "content": "an answer."}]

    Example:

    .. code-block:: python

        from towhee import pipe, ops

        p = (
            pipe.input('messages')
                .map('messages', 'answer',
                     ops.LLM.SkyChat(app_key=SKYCHAT_APP_KEY,
                                     app_secret=SKYCHAT_APP_SECRET,
                                     temperature=0.5))
                .output('answer')
        )

        messages=[
            {'question': 'Zilliz Cloud 是什么?',
             'answer': 'Zilliz Cloud 是一种全托管的向量检索服务。'},
            {'question': '它和 Milvus 的关系是什么?'}
        ]
        answer = p(messages).get()[0]
    """

    ZhipuAI: HubOp = HubOp('LLM.ZhipuAI')
    """
    This operator is implemented with ChatGLM services from the `Zhipu AI open
    platform <https://open.bigmodel.cn/>`_. Please note you will need an API
    key to access the service.

    __init__(self, model_name: str = 'chatglm_130b', api_key: str = None, **kwargs)
        model_name(`str`):
            The model used in the Zhipu AI service, defaults to 'chatglm_130b'.
            Visit the Zhipu AI documentation for supported models.
        api_key(`str`):
            The Zhipu AI API key in string, defaults to None. If None, it will
            use the environment variable ZHIPUAI_API_KEY.
        kwargs(`Dict`):
            Other ChatGLM parameters such as temperature, etc.

    __call__(self, messages: List[Dict]) -> str
        messages(`List[Dict]`):
            A list of messages to set up the chat. Must be a list of
            dictionaries whose keys are drawn from "system", "question" and
            "answer". For example,
            [{"question": "a past question?", "answer": "a past answer."},
            {"question": "current question?"}]. It also accepts the original
            ChatGLM message format like
            [{"role": "user", "content": "a question?"},
            {"role": "assistant", "content": "an answer."}]

    Example:

    .. code-block:: python

        from towhee import pipe, ops

        p = (
            pipe.input('messages')
                .map('messages', 'answer',
                     ops.LLM.ZhipuAI(
                         api_key=ZHIPUAI_API_KEY,
                         model_name='chatglm_130b',  # or 'chatglm_6b'
                         temperature=0.5,
                         max_tokens=50,
                     ))
                .output('answer')
        )

        messages=[
            {'system': '你是一个资深的软件工程师,善于回答关于科技项目的问题。'},
            {'question': 'Zilliz Cloud 是什么?',
             'answer': 'Zilliz Cloud 是一种全托管的向量检索服务。'},
            {'question': '它和 Milvus 的关系是什么?'}
        ]
        answer = p(messages).get()[0]
    """
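    # Most of the hosted ops above fall back to an environment variable when
    # their key arguments are left as None, which keeps credentials out of
    # pipeline code. A minimal sketch, assuming the key value itself is
    # supplied by the deployment environment:
    #
    #     import os
    #
    #     os.environ['ZHIPUAI_API_KEY'] = '<your-api-key>'
    #     op = ops.LLM.ZhipuAI(model_name='chatglm_6b')  # api_key read from env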
    Dolly: HubOp = HubOp('LLM.Dolly')
    """
    This operator uses a pretrained `Dolly
    <https://github.com/databrickslabs/dolly>`_ model to generate responses.
    It downloads the model from HuggingFace Models.

    __init__(self, model_name: str = 'databricks/dolly-v2-12b', **kwargs)
        model_name(`str`):
            The model name in string, defaults to 'databricks/dolly-v2-12b'.
            Supported model names: databricks/dolly-v2-12b,
            databricks/dolly-v2-7b, databricks/dolly-v2-3b,
            databricks/dolly-v1-6b.
        kwargs(`Dict`):
            Other Dolly model parameters such as device_map.

    __call__(self, messages: List[Dict]) -> str
        messages(`List[Dict]`):
            A list of messages to set up the chat. Must be a list of
            dictionaries whose keys are drawn from "system", "question" and
            "answer". For example,
            [{"question": "a past question?", "answer": "a past answer."},
            {"question": "current question?"}]

    Example:

    .. code-block:: python

        from towhee import pipe, ops

        p = (
            pipe.input('question', 'docs', 'history')
                .map(('question', 'docs', 'history'), 'prompt',
                     ops.prompt.question_answer(llm_name='dolly'))
                .map('prompt', 'answer', ops.LLM.Dolly())
                .output('answer')
        )

        history=[('Who won the world series in 2020?',
                  'The Los Angeles Dodgers won the World Series in 2020.')]
        question = 'Where was it played?'
        answer = p(question, [], history).get()[0]
    """

    def __call__(self, *args: Any, **kwds: Any) -> Any:
        return HubOp('towhee.LLM')(*args, **kwds)
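
# Calling an LLM instance directly proxies to the hub operator registered as
# 'towhee.LLM', mirroring how the named attributes above resolve through
# HubOp. A minimal sketch; the argument list is illustrative, not a confirmed
# signature:
#
#     llm = LLM()
#     answer = llm(messages)  # equivalent to HubOp('towhee.LLM')(messages)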