An (unofficial) python native client for easy interaction with MiniMax Open Platform
Reason this release was yanked:
Python 3.8 compatibility issue
Project description
MiniMax Python Client
An (unofficial) python native client for easy interaction with MiniMax Open Platform
The current implementation includes the following official APIs offered by MiniMax:
- ChatCompletion v2
- Embeddings
- File
- Finetune
Prerequisites
- Python >= 3.8
- pip (or any other tool that does the same job)
- Internet connection
- An API KEY acquired from MiniMax Open Platform
Quick Start
1. Install the package
pip install minimax-client
2. Import the package and invoke the client
2.1 Sync call
from minimax_client import MiniMax
client = MiniMax(api_key="<YOUR_API_KEY>")
response = client.chat.completions.create(
messages=[
{
"role": "user",
"content": "1 + 1 equals: ",
}
]
)
print(response.choices[0].message.content)
2.2 Sync call with streaming
from minimax_client import MiniMax
client = MiniMax(api_key="<YOUR_API_KEY>")
stream = client.chat.completions.create(
messages=[
{
"role": "user",
"content": "What is the term GPT short for?",
}
],
stream=True,
)
for chunk in stream:
print(chunk.choices[0].delta.content if chunk.choices[0].delta else "", end="")
2.3 Sync call with tools, stream enabled
from minimax_client import MiniMax
client = MiniMax(api_key="<YOUR_API_KEY>")
stream = client.chat.completions.create(
model="abab5.5-chat",
messages=[
{
"role": "system",
"content": "You are a helpful assistant",
},
{
"role": "user",
"content": "What's the weather like in Los Angeles right now?",
},
{
"role": "assistant",
"tool_calls": [
{
"id": "call_function_2936815621",
"type": "function",
"function": {
"name": "get_current_weather",
"arguments": '{"location": "Los Angeles"}',
},
}
],
},
{
"role": "tool",
"tool_call_id": "call_function_2936815621",
"content": "Los Angeles / Sunny / 51°F / Wind: East 5 mph",
},
],
stream=True,
tool_choice="auto",
tools=[
{
"type": "function",
"function": {
"name": "get_current_weather",
"description": "Retrieve the current weather of a given location",
"parameters": '{"type": "object", "properties": {"location": {"type": "string", "description": "Name of a city, e.g. Paris, London"}}, "required": ["location"]}',
},
}
],
)
for chunk in stream:
print(chunk.choices[0].delta.content if chunk.choices[0].delta else "", end="")
# It's currently sunny in Los Angeles, with a temperature of 51°F and wind from the east at 5 mph.
2.4 Async call
import asyncio
from minimax_client import AsyncMiniMax
async def demo():
client = AsyncMiniMax(api_key="<YOUR_API_KEY>")
response = await client.chat.completions.create(
messages=[
{
"role": "user",
"content": "1 + 1 equals: ",
}
]
)
print(response.choices[0].message.content)
asyncio.run(demo())
2.5 Async call with streaming
import asyncio
from minimax_client import AsyncMiniMax
async def demo():
client = AsyncMiniMax(api_key="<YOUR_API_KEY>")
stream = await client.chat.completions.create(
messages=[
{
"role": "user",
"content": "What is the term GPT short for?",
}
],
stream=True,
)
async for chunk in stream:
print(chunk.choices[0].delta.content if chunk.choices[0].delta else "", end="")
asyncio.run(demo())
2.6 Sync call for embeddings
from minimax_client import MiniMax
client = MiniMax(api_key="<YOUR_API_KEY>")
response = client.embeddings.create(
input=["Hello world!", "Nice to meet you!"],
target="db",
)
print(response.vectors[0][:10])
print(response.vectors[1][:10])
2.7 Async call for embeddings
import asyncio
from minimax_client import AsyncMiniMax
async def demo():
client = AsyncMiniMax(api_key="<YOUR_API_KEY>")
response = await client.embeddings.create(
input=["Hello async world!", "Nice to meet you async!"],
target="query",
)
print(response.vectors[0][:10])
print(response.vectors[1][:10])
asyncio.run(demo())
2.8 Sync call for files
from minimax_client import MiniMax
client = MiniMax(api_key="<YOUR_API_KEY>")
resp = client.files.create(filepath="sample.txt", purpose="retrieval")
print(resp.file.file_id)
resp = client.files.list(purpose="retrieval")
print(resp.files[0].file_id)
resp = client.files.retrieve(file_id=resp.files[0].file_id)
print(resp.file.bytes)
resp = client.files.delete(file_id=resp.file.file_id)
print(resp.file_id)
2.9 Async call for files
import asyncio
from minimax_client import AsyncMiniMax
async def demo():
client = AsyncMiniMax(api_key="<YOUR_API_KEY>")
resp = await client.files.create(filepath="sample.txt", purpose="retrieval")
print(resp.file.file_id)
resp = await client.files.list(purpose="retrieval")
print(resp.files[0].file_id)
resp = await client.files.retrieve(file_id=resp.files[0].file_id)
print(resp.file.bytes)
resp = await client.files.delete(file_id=resp.file.file_id)
print(resp.file_id)
asyncio.run(demo())
2.10 Sync call for fine-tuning and models
from minimax_client import MiniMax
client = MiniMax(api_key="<YOUR_API_KEY>")
resp = client.fine_tuning.jobs.create(
model="abab5.5s-chat-240123", training_file=..., suffix="test"
)
print(resp.id)
print(resp.fine_tuned_model)
resp = client.fine_tuning.jobs.list(limit=5)
print(resp.job_list[0])
resp = client.model.list()
print(resp.model_list[0])
resp = client.model.retrieve(model="ft:abab5.5s-chat-240123_XXXXXXXXXXXXX:test")
print(resp.model.id)
Project details
Release history Release notifications | RSS feed
Download files
Download the file for your platform. If you're not sure which to choose, learn more about installing packages.
Source Distribution
minimax_client-0.4.0.tar.gz
(13.7 kB
view hashes)
Built Distribution
Close
Hashes for minimax_client-0.4.0-py3-none-any.whl
Algorithm | Hash digest | |
---|---|---|
SHA256 | 2b84c79f187b68f63ac92c8fde3fbfad0e4a99edaab12ab54808337e893f4602 |
|
MD5 | a3efa57aeb8a459edd14e48cbd7f7b15 |
|
BLAKE2b-256 | bf395cb31da34958f3f784547c8f10e9cd9bc70e481d717382bad846a4eeb662 |