<?xml version="1.0" encoding="UTF-8"?>
<rss version="2.0">
  <channel>
    <title>PyPI recent updates for cachellm-py</title>
    <link>https://pypi.org/project/cachellm-py/</link>
    <description>Recent updates to the Python Package Index for cachellm-py</description>
    <language>en</language>
    <item>
      <title>0.2.0</title>
      <link>https://pypi.org/project/cachellm-py/0.2.0/</link>
      <description>Auto-optimize LLM prompt caching. Save 60-90% on Claude, GPT &amp; Gemini API costs.</description>
      <pubDate>Fri, 24 Apr 2026 07:00:20 GMT</pubDate>
    </item>
  </channel>
</rss>