This notebook shows how to use LangChain with LlamaAPI, a hosted version of Llama2 that adds support for function calling.

```bash
pip install -qU llamaapi
```
```python
from llamaapi import LlamaAPI

# Replace 'Your_API_Token' with your actual API token
llama = LlamaAPI("Your_API_Token")
```
```python
from langchain_experimental.llms import ChatLlamaAPI
```
```text
/Users/harrisonchase/.pyenv/versions/3.9.1/envs/langchain/lib/python3.9/site-packages/deeplake/util/check_latest_version.py:32: UserWarning: A newer version of deeplake (3.6.12) is available. It's recommended that you update to the latest version using `pip install -U deeplake`.
  warnings.warn(
```
```python
model = ChatLlamaAPI(client=llama)
```
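Once wrapped, the model can be used like any other LangChain chat model. A minimal sketch, assuming the standard chat-model interface of this LangChain version and a made-up prompt:

```python
from langchain.schema import HumanMessage

# Hypothetical example prompt; any chat message works here.
response = model([HumanMessage(content="Tell me a joke about llamas.")])
print(response.content)
```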
```python
from langchain.chains import create_tagging_chain

schema = {
    "properties": {
        "sentiment": {
            "type": "string",
            "description": "the sentiment encountered in the passage",
        },
        "aggressiveness": {
            "type": "integer",
            "description": "a 0-10 score of how aggressive the passage is",
        },
        "language": {"type": "string", "description": "the language of the passage"},
    }
}

chain = create_tagging_chain(schema, model)
```
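With the schema attached, the chain extracts the tagged fields from a passage. A minimal sketch with a made-up input string; the exact output depends on the model:

```python
# Hypothetical input passage; the chain returns a dict shaped by the schema,
# e.g. {"sentiment": ..., "aggressiveness": ..., "language": ...}
result = chain.run("give me your money")
print(result)
```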