<?xml version="1.0" encoding="UTF-8"?>
<rss version="2.0">
  <channel>
    <title>PyPI recent updates for trillim</title>
    <link>https://pypi.org/project/trillim/</link>
    <description>Recent updates to the Python Package Index for trillim</description>
    <language>en</language>
    <item>
      <title>0.10.2</title>
      <link>https://pypi.org/project/trillim/0.10.2/</link>
      <description>The fastest inference framework to run AI on CPUs</description>
      <author>vineetv314@gmail.com</author>
      <pubDate>Fri, 01 May 2026 00:17:47 GMT</pubDate>
    </item>
    <item>
      <title>0.10.1</title>
      <link>https://pypi.org/project/trillim/0.10.1/</link>
      <description>The fastest inference framework to run AI on CPUs</description>
      <author>vineetv314@gmail.com</author>
      <pubDate>Tue, 28 Apr 2026 05:10:45 GMT</pubDate>
    </item>
    <item>
      <title>0.10.0</title>
      <link>https://pypi.org/project/trillim/0.10.0/</link>
      <description>The fastest inference framework to run AI on CPUs</description>
      <author>vineetv314@gmail.com</author>
      <pubDate>Mon, 27 Apr 2026 23:54:29 GMT</pubDate>
    </item>
    <item>
      <title>0.9.0</title>
      <link>https://pypi.org/project/trillim/0.9.0/</link>
      <description>The fastest inference framework to run AI on CPUs</description>
      <author>vineetv314@gmail.com</author>
      <pubDate>Fri, 17 Apr 2026 20:26:45 GMT</pubDate>
    </item>
    <item>
      <title>0.8.1</title>
      <link>https://pypi.org/project/trillim/0.8.1/</link>
      <description>The fastest inference framework to run AI on CPUs</description>
      <author>vineetv314@gmail.com</author>
      <pubDate>Thu, 16 Apr 2026 19:39:51 GMT</pubDate>
    </item>
    <item>
      <title>0.8.0</title>
      <link>https://pypi.org/project/trillim/0.8.0/</link>
      <description>The fastest inference framework to run AI on CPUs</description>
      <author>vineetv314@gmail.com</author>
      <pubDate>Fri, 10 Apr 2026 00:16:16 GMT</pubDate>
    </item>
    <item>
      <title>0.7.2</title>
      <link>https://pypi.org/project/trillim/0.7.2/</link>
      <description>The fastest inference framework to run AI on CPUs</description>
      <author>vineetv314@gmail.com</author>
      <pubDate>Sun, 29 Mar 2026 22:53:26 GMT</pubDate>
    </item>
    <item>
      <title>0.7.1</title>
      <link>https://pypi.org/project/trillim/0.7.1/</link>
      <description>The fastest inference framework to run AI on CPUs</description>
      <author>vineetv314@gmail.com</author>
      <pubDate>Sat, 28 Mar 2026 02:44:08 GMT</pubDate>
    </item>
    <item>
      <title>0.7.0</title>
      <link>https://pypi.org/project/trillim/0.7.0/</link>
      <description>The fastest inference framework to run AI on CPUs</description>
      <author>vineetv314@gmail.com</author>
      <pubDate>Sat, 28 Mar 2026 02:28:16 GMT</pubDate>
    </item>
    <item>
      <title>0.6.0</title>
      <link>https://pypi.org/project/trillim/0.6.0/</link>
      <description>The fastest inference framework to run BitNet models on CPUs</description>
      <author>vineetv314@gmail.com</author>
      <pubDate>Mon, 16 Mar 2026 05:49:23 GMT</pubDate>
    </item>
    <item>
      <title>0.5.3</title>
      <link>https://pypi.org/project/trillim/0.5.3/</link>
      <description>The fastest inference framework to run BitNet models on CPUs</description>
      <author>vineetv314@gmail.com</author>
      <pubDate>Tue, 03 Mar 2026 23:22:24 GMT</pubDate>
    </item>
    <item>
      <title>0.5.2</title>
      <link>https://pypi.org/project/trillim/0.5.2/</link>
      <description>The fastest inference framework to run BitNet models on CPUs</description>
      <author>vineetv314@gmail.com</author>
      <pubDate>Tue, 03 Mar 2026 20:02:45 GMT</pubDate>
    </item>
    <item>
      <title>0.5.1</title>
      <link>https://pypi.org/project/trillim/0.5.1/</link>
      <description>The fastest inference framework to run BitNet models on CPUs</description>
      <author>vineetv314@gmail.com</author>
      <pubDate>Tue, 03 Mar 2026 20:00:25 GMT</pubDate>
    </item>
    <item>
      <title>0.5.0</title>
      <link>https://pypi.org/project/trillim/0.5.0/</link>
      <description>The fastest inference framework to run BitNet models on CPUs</description>
      <author>vineetv314@gmail.com</author>
      <pubDate>Tue, 03 Mar 2026 04:47:43 GMT</pubDate>
    </item>
    <item>
      <title>0.2.6</title>
      <link>https://pypi.org/project/trillim/0.2.6/</link>
      <description>The fastest inference framework to run BitNet models on CPUs</description>
      <author>vineetv314@gmail.com</author>
      <pubDate>Fri, 27 Feb 2026 20:23:49 GMT</pubDate>
    </item>
    <item>
      <title>0.2.5</title>
      <link>https://pypi.org/project/trillim/0.2.5/</link>
      <description>The fastest inference framework to run BitNet models on CPUs</description>
      <author>vineetv314@gmail.com</author>
      <pubDate>Tue, 24 Feb 2026 05:57:27 GMT</pubDate>
    </item>
    <item>
      <title>0.1.5</title>
      <link>https://pypi.org/project/trillim/0.1.5/</link>
      <description>The fastest inference framework to run BitNet models on CPUs</description>
      <author>vineetv314@gmail.com</author>
      <pubDate>Wed, 18 Feb 2026 03:39:51 GMT</pubDate>
    </item>
    <item>
      <title>0.1.4</title>
      <link>https://pypi.org/project/trillim/0.1.4/</link>
      <description>The fastest inference framework to run BitNet models on CPUs</description>
      <author>vineetv314@gmail.com</author>
      <pubDate>Wed, 18 Feb 2026 03:22:34 GMT</pubDate>
    </item>
    <item>
      <title>0.1.3</title>
      <link>https://pypi.org/project/trillim/0.1.3/</link>
      <description>The fastest inference framework to run BitNet models on CPUs</description>
      <author>vineetv314@gmail.com</author>
      <pubDate>Tue, 17 Feb 2026 17:32:02 GMT</pubDate>
    </item>
    <item>
      <title>0.1.2</title>
      <link>https://pypi.org/project/trillim/0.1.2/</link>
      <description>The fastest inference framework to run BitNet models on CPUs</description>
      <author>vineetv314@gmail.com</author>
      <pubDate>Tue, 17 Feb 2026 16:47:09 GMT</pubDate>
    </item>
    <item>
      <title>0.1.1</title>
      <link>https://pypi.org/project/trillim/0.1.1/</link>
      <description>The fastest inference framework to run BitNet models on CPUs</description>
      <author>vineetv314@gmail.com</author>
      <pubDate>Tue, 17 Feb 2026 16:40:02 GMT</pubDate>
    </item>
    <item>
      <title>0.1.0</title>
      <link>https://pypi.org/project/trillim/0.1.0/</link>
      <description>The fastest inference framework to run BitNet models on CPUs</description>
      <author>vineetv314@gmail.com</author>
      <pubDate>Tue, 17 Feb 2026 16:26:41 GMT</pubDate>
    </item>
  </channel>
</rss>