<?xml version="1.0" encoding="UTF-8"?>
<rss version="2.0">
  <channel>
    <title>PyPI recent updates for aither-kvcache</title>
    <link>https://pypi.org/project/aither-kvcache/</link>
    <description>Recent updates to the Python Package Index for aither-kvcache</description>
    <language>en</language>
    <item>
      <title>2.1.0</title>
      <link>https://pypi.org/project/aither-kvcache/2.1.0/</link>
      <description>Near-optimal KV cache compression for LLM inference — TurboQuant (vector quantization) + TriAttention (spectral compression)</description>
      <pubDate>Wed, 15 Apr 2026 00:35:53 GMT</pubDate>
    </item>
    <item>
      <title>2.0.1</title>
      <link>https://pypi.org/project/aither-kvcache/2.0.1/</link>
      <description>Near-optimal KV cache compression for LLM inference — TurboQuant (vector quantization) + TriAttention (spectral compression)</description>
      <pubDate>Tue, 07 Apr 2026 23:19:28 GMT</pubDate>
    </item>
    <item>
      <title>2.0.0</title>
      <link>https://pypi.org/project/aither-kvcache/2.0.0/</link>
      <description>Near-optimal KV cache compression for LLM inference — TurboQuant (vector quantization) + TriAttention (spectral compression)</description>
      <pubDate>Tue, 07 Apr 2026 18:56:22 GMT</pubDate>
    </item>
    <item>
      <title>1.3.1</title>
      <link>https://pypi.org/project/aither-kvcache/1.3.1/</link>
      <description>Near-optimal KV cache quantization + graph-aware eviction for LLM inference</description>
      <pubDate>Sun, 05 Apr 2026 20:11:16 GMT</pubDate>
    </item>
    <item>
      <title>1.3.0</title>
      <link>https://pypi.org/project/aither-kvcache/1.3.0/</link>
      <description>Near-optimal KV cache quantization + graph-aware eviction for LLM inference</description>
      <pubDate>Sun, 05 Apr 2026 13:58:57 GMT</pubDate>
    </item>
    <item>
      <title>1.2.1</title>
      <link>https://pypi.org/project/aither-kvcache/1.2.1/</link>
      <description>Near-optimal KV cache quantization + graph-aware eviction for LLM inference</description>
      <pubDate>Sun, 05 Apr 2026 13:12:44 GMT</pubDate>
    </item>
    <item>
      <title>1.2.0</title>
      <link>https://pypi.org/project/aither-kvcache/1.2.0/</link>
      <description>Near-optimal KV cache quantization + graph-aware eviction for LLM inference</description>
      <pubDate>Sun, 05 Apr 2026 03:43:58 GMT</pubDate>
    </item>
    <item>
      <title>1.1.1</title>
      <link>https://pypi.org/project/aither-kvcache/1.1.1/</link>
      <description>Near-optimal KV cache quantization + graph-aware eviction for LLM inference</description>
      <pubDate>Sat, 04 Apr 2026 16:17:49 GMT</pubDate>
    </item>
    <item>
      <title>1.1.0</title>
      <link>https://pypi.org/project/aither-kvcache/1.1.0/</link>
      <description>Near-optimal KV cache quantization + graph-aware eviction for LLM inference</description>
      <pubDate>Fri, 03 Apr 2026 20:17:36 GMT</pubDate>
    </item>
    <item>
      <title>0.9.2</title>
      <link>https://pypi.org/project/aither-kvcache/0.9.2/</link>
      <description>Near-optimal KV cache quantization for LLM inference (arXiv:2504.19874)</description>
      <pubDate>Thu, 02 Apr 2026 21:28:53 GMT</pubDate>
    </item>
    <item>
      <title>0.9.1</title>
      <link>https://pypi.org/project/aither-kvcache/0.9.1/</link>
      <description>Near-optimal KV cache quantization for LLM inference (arXiv:2504.19874)</description>
      <pubDate>Thu, 02 Apr 2026 21:00:08 GMT</pubDate>
    </item>
    <item>
      <title>0.8.1</title>
      <link>https://pypi.org/project/aither-kvcache/0.8.1/</link>
      <description>Near-optimal KV cache quantization for LLM inference (arXiv:2504.19874)</description>
      <pubDate>Wed, 01 Apr 2026 00:01:24 GMT</pubDate>
    </item>
    <item>
      <title>0.8.0</title>
      <link>https://pypi.org/project/aither-kvcache/0.8.0/</link>
      <description>Near-optimal KV cache quantization for LLM inference (arXiv:2504.19874)</description>
      <pubDate>Tue, 31 Mar 2026 15:32:15 GMT</pubDate>
    </item>
    <item>
      <title>0.7.0</title>
      <link>https://pypi.org/project/aither-kvcache/0.7.0/</link>
      <description>Near-optimal KV cache quantization for LLM inference (arXiv:2504.19874)</description>
      <pubDate>Mon, 30 Mar 2026 19:46:05 GMT</pubDate>
    </item>
    <item>
      <title>0.6.0</title>
      <link>https://pypi.org/project/aither-kvcache/0.6.0/</link>
      <description>Near-optimal KV cache quantization for LLM inference (arXiv:2504.19874)</description>
      <pubDate>Mon, 30 Mar 2026 19:22:56 GMT</pubDate>
    </item>
    <item>
      <title>0.5.0</title>
      <link>https://pypi.org/project/aither-kvcache/0.5.0/</link>
      <description>Near-optimal KV cache quantization for LLM inference (arXiv:2504.19874)</description>
      <pubDate>Mon, 30 Mar 2026 17:09:56 GMT</pubDate>
    </item>
    <item>
      <title>0.4.0</title>
      <link>https://pypi.org/project/aither-kvcache/0.4.0/</link>
      <description>Near-optimal KV cache quantization for LLM inference (arXiv:2504.19874)</description>
      <pubDate>Sat, 28 Mar 2026 07:13:49 GMT</pubDate>
    </item>
    <item>
      <title>0.3.0</title>
      <link>https://pypi.org/project/aither-kvcache/0.3.0/</link>
      <description>Near-optimal KV cache quantization for LLM inference (arXiv:2504.19874)</description>
      <pubDate>Fri, 27 Mar 2026 23:19:16 GMT</pubDate>
    </item>
    <item>
      <title>0.2.0</title>
      <link>https://pypi.org/project/aither-kvcache/0.2.0/</link>
      <description>Near-optimal KV cache quantization for LLM inference (arXiv:2504.19874)</description>
      <pubDate>Fri, 27 Mar 2026 04:00:48 GMT</pubDate>
    </item>
    <item>
      <title>0.1.0</title>
      <link>https://pypi.org/project/aither-kvcache/0.1.0/</link>
      <description>Near-optimal KV cache quantization for LLM inference (arXiv:2504.19874)</description>
      <pubDate>Fri, 27 Mar 2026 03:33:02 GMT</pubDate>
    </item>
  </channel>
</rss>