parea-sdk

Parea python sdk

Installation

pip install -U parea-ai

or install with Poetry

poetry add parea-ai
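
You can verify the installation and API key setup with a quick smoke test; a minimal sketch, assuming PAREA_API_KEY is already exported in your environment (the Getting Started example below loads it from a .env file instead):

import os

from parea import Parea

# Instantiate the client with the API key from your Parea dashboard.
p = Parea(api_key=os.environ["PAREA_API_KEY"])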

Getting Started

import os

from dotenv import load_dotenv

from parea import Parea
from parea.schemas.models import Completion, UseDeployedPrompt, CompletionResponse, UseDeployedPromptResponse

load_dotenv()

p = Parea(api_key=os.getenv("PAREA_API_KEY"))

# You will find this deployment_id in the Parea dashboard
deployment_id = '<DEPLOYMENT_ID>'

# Assuming your deployed prompt's message is:
# {"role": "user", "content": "Write a hello world program using {{x}} and the {{y}} framework."}
inputs = {"x": "Golang", "y": "Fiber"}

# You can easily unpack a dictionary into an attrs class
test_completion = Completion(
  **{
    "deployment_id": deployment_id,
    "llm_inputs": inputs,
    "metadata": {"purpose": "testing"}
  }
)

# By passing in your inputs, in addition to the raw message with the unfilled variables {{x}} and {{y}},
# you will also get the filled-in prompt:
# {"role": "user", "content": "Write a hello world program using Golang and the Fiber framework."}
test_get_prompt = UseDeployedPrompt(deployment_id=deployment_id, llm_inputs=inputs)


def main():
  completion_response: CompletionResponse = p.completion(data=test_completion)
  print(completion_response)
  deployed_prompt: UseDeployedPromptResponse = p.get_prompt(data=test_get_prompt)
  print("\n\n")
  print(deployed_prompt)


async def main_async():
  completion_response: CompletionResponse = await p.acompletion(data=test_completion)
  print(completion_response)
  deployed_prompt: UseDeployedPromptResponse = await p.aget_prompt(data=test_get_prompt)
  print("\n\n")
  print(deployed_prompt)
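
Neither entry point is invoked in the snippet above; a minimal way to run both, assuming Python 3.7+ so asyncio.run is available:

import asyncio

if __name__ == "__main__":
  main()                     # synchronous completion and get_prompt calls
  asyncio.run(main_async())  # async variants via acompletion and aget_prompt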

Logging results from LLM providers

import os
import time

import openai
from dotenv import load_dotenv

from parea import Parea
from parea.helpers import to_date_and_time_string, gen_trace_id
from parea.parea_logger import parea_logger
from parea.schemas.models import TraceLog, LLMInputs

load_dotenv()

openai.api_key = os.getenv("OPENAI_API_KEY")
p = Parea(api_key=os.getenv("PAREA_API_KEY"))

# define your OpenAI call as you would normally
x = "Golang"
y = "Fiber"
inputs = {"x": x, "y": y}
messages = [
  {"role": "user", "content": f"Write a hello world program using {x} and the {y} framework."},
]
model = "gpt-3.5-turbo"
model_params = {
  "temperature": 0.7,
  "top_p": 1.0,
}
model_config = {"model": model, "messages": messages, "model_params": model_params}
start_time = time.time()
completion = openai.ChatCompletion.create(model=model, messages=messages, **model_params)
output = completion.choices[0].message["content"]
end_time = time.time()

# the TraceLog schema
log_request = TraceLog(
  trace_id=gen_trace_id(),
  start_timestamp=to_date_and_time_string(start_time),
  end_timestamp=to_date_and_time_string(end_time),
  status="success",
  trace_name="Test Log",
  inputs=inputs,
  configuration=LLMInputs(**model_config),
  output=output,
  input_tokens=completion.usage["prompt_tokens"],
  output_tokens=completion.usage["completion_tokens"],
  total_tokens=completion.usage["total_tokens"],
)


def main():
  parea_logger.record_log(data=log_request)


async def main_async():
  await parea_logger.arecord_log(data=log_request)
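
If you log many calls, it can help to fold the timing, the OpenAI request, and the TraceLog construction into a single helper. Below is a rough sketch built from the same pieces as the snippet above; call_and_log is a hypothetical name, not part of the SDK, and it reuses the imports defined earlier:

def call_and_log(trace_name, messages, model="gpt-3.5-turbo", **model_params):
  """Call OpenAI and record the result with Parea (illustrative sketch, not an official helper)."""
  start_time = time.time()
  completion = openai.ChatCompletion.create(model=model, messages=messages, **model_params)
  end_time = time.time()
  output = completion.choices[0].message["content"]
  parea_logger.record_log(
    data=TraceLog(
      trace_id=gen_trace_id(),
      start_timestamp=to_date_and_time_string(start_time),
      end_timestamp=to_date_and_time_string(end_time),
      status="success",
      trace_name=trace_name,
      configuration=LLMInputs(model=model, messages=messages, model_params=model_params),
      output=output,
      input_tokens=completion.usage["prompt_tokens"],
      output_tokens=completion.usage["completion_tokens"],
      total_tokens=completion.usage["total_tokens"],
    )
  )
  return output


# For example:
# call_and_log("hello-world", messages, temperature=0.7, top_p=1.0)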

Open source community features

  • Ready-to-use Pull Request templates and several Issue templates.

  • Files such as: LICENSE, CONTRIBUTING.md, CODE_OF_CONDUCT.md, and SECURITY.md are generated automatically.
  • Semantic Versions specification with Release Drafter.

🛡 License

This project is licensed under the terms of the Apache Software License 2.0. See LICENSE for more details.

📃 Citation

@misc{parea-sdk,
  author = {joel-parea-ai},
  title = {Parea python sdk},
  year = {2023},
  publisher = {GitHub},
  journal = {GitHub repository},
  howpublished = {\url{https://github.com/parea-ai/parea-sdk}}
}
