VM-X AI Langchain Python SDK
Project description
VM-X SDK for Python Langchain
Description
VM-X AI SDK client for Python Langchain
Installation
pip install vm-x-ai-langchain
poetry add vm-x-ai-langchain
Usage
Non-Streaming
from vmxai_langchain import ChatVMX
llm = ChatVMX(
resource="default",
)
messages = [
(
"system",
"You are a helpful translator. Translate the user sentence to French.",
),
("human", "I love programming."),
]
result = llm.invoke(messages)
Streaming
from vmxai_langchain import ChatVMX
llm = ChatVMX(
resource="default",
)
messages = [
(
"system",
"You are a helpful translator. Translate the user sentence to French.",
),
("human", "I love programming."),
]
for chunk in llm.stream(messages):
print(chunk.content, end="", flush=True)
Function Calling
Decorator
from langchain_core.messages import HumanMessage, ToolMessage
from langchain_core.tools import tool
from vmxai_langchain import ChatVMX
@tool
def add(a: int, b: int) -> int:
"""Adds a and b.
Args:
a: first int
b: second int
"""
return a + b
@tool
def multiply(a: int, b: int) -> int:
"""Multiplies a and b.
Args:
a: first int
b: second int
"""
return a * b
tools = [add, multiply]
llm = ChatVMX(
resource="default",
)
llm_with_tools = llm.bind_tools(tools)
query = "What is 3 * 12? Also, what is 11 + 49?"
messages = [HumanMessage(query)]
ai_msg = llm_with_tools.invoke(messages)
messages.append(ai_msg)
for tool_call in ai_msg.tool_calls:
selected_tool = {"add": add, "multiply": multiply}[tool_call["name"].lower()]
tool_output = selected_tool.invoke(tool_call["args"])
messages.append(ToolMessage(tool_output, tool_call_id=tool_call["id"]))
print(llm_with_tools.invoke(messages))
Pydantic
from langchain_core.pydantic_v1 import BaseModel, Field
from vmxai_langchain import ChatVMX
from vmxai_langchain.output_parsers.tools import PydanticToolsParser
# Note that the docstrings here are crucial, as they will be passed along
# to the model along with the class name.
class add(BaseModel):
"""Add two integers together."""
a: int = Field(..., description="First integer")
b: int = Field(..., description="Second integer")
class multiply(BaseModel):
"""Multiply two integers together."""
a: int = Field(..., description="First integer")
b: int = Field(..., description="Second integer")
tools = [add, multiply]
llm = ChatVMX(
resource="default",
)
llm_with_tools = llm.bind_tools(tools) | PydanticToolsParser(tools=[multiply, add])
query = "What is 3 * 12? Also, what is 11 + 49?"
print(llm_with_tools.invoke(query))
Function Calling Streaming
from langchain_core.pydantic_v1 import BaseModel, Field
from vmxai_langchain import ChatVMX
from vmxai_langchain.output_parsers.tools import PydanticToolsParser
# Note that the docstrings here are crucial, as they will be passed along
# to the model along with the class name.
class add(BaseModel):
"""Add two integers together."""
a: int = Field(..., description="First integer")
b: int = Field(..., description="Second integer")
class multiply(BaseModel):
"""Multiply two integers together."""
a: int = Field(..., description="First integer")
b: int = Field(..., description="Second integer")
tools = [add, multiply]
llm = ChatVMX(
resource="default",
)
llm_with_tools = llm.bind_tools(tools) | PydanticToolsParser(tools=[multiply, add])
query = "What is 3 * 12? Also, what is 11 + 49?"
for chunk in llm_with_tools.stream(query):
print(chunk)
Structured Output
from langchain_core.pydantic_v1 import BaseModel, Field
from vmxai_langchain import ChatVMX
class Joke(BaseModel):
setup: str = Field(description="The setup of the joke")
punchline: str = Field(description="The punchline to the joke")
llm = ChatVMX(resource="default")
structured_llm = llm.with_structured_output(Joke, strict=True)
print(structured_llm.invoke("Tell me a joke about cats"))
Limitations
- Async client is not supported.
- `json_mode` and `json_schema` structured output modes are not supported.
Change Log
Project details
Release history Release notifications | RSS feed
Download files
Download the file for your platform. If you're not sure which to choose, learn more about installing packages.
Source Distribution
vm_x_ai_langchain-0.1.0.tar.gz (17.5 kB, view hashes)
Built Distribution
Close
Hashes for vm_x_ai_langchain-0.1.0-py3-none-any.whl
Algorithm | Hash digest
---|---
SHA256 | 0f8d535b25f81c85676b7c6f67a14d5b62d7425c8755fd3ff710b7c1a85aca89
MD5 | bcad76c44041f4e735bc4ffb223d788a
BLAKE2b-256 | 41f9a6ff6831af32e950169fae528b0453cb7d7a768b9a21d9564009692d34d1