<?xml version="1.0" encoding="UTF-8"?>
<rss version="2.0">
  <channel>
    <title>PyPI recent updates for ollm</title>
    <link>https://pypi.org/project/ollm/</link>
    <description>Recent updates to the Python Package Index for ollm</description>
    <language>en</language>
    <item>
      <title>1.0.3</title>
      <link>https://pypi.org/project/ollm/1.0.3/</link>
      <description>LLM Inference for Large-Context Offline Workloads</description>
      <author>anuarsh@ailabs.us</author>
      <pubDate>Fri, 31 Oct 2025 02:37:23 GMT</pubDate>
    </item>
    <item>
      <title>1.0.2</title>
      <link>https://pypi.org/project/ollm/1.0.2/</link>
      <description>LLM Inference for Large-Context Offline Workloads</description>
      <author>anuarsh@ailabs.us</author>
      <pubDate>Tue, 28 Oct 2025 21:30:00 GMT</pubDate>
    </item>
    <item>
      <title>1.0.1</title>
      <link>https://pypi.org/project/ollm/1.0.1/</link>
      <description>LLM Inference for Large-Context Offline Workloads</description>
      <author>anuarsh@ailabs.us</author>
      <pubDate>Mon, 20 Oct 2025 19:19:25 GMT</pubDate>
    </item>
    <item>
      <title>1.0.0</title>
      <link>https://pypi.org/project/ollm/1.0.0/</link>
      <description>LLM Inference for Large-Context Offline Workloads</description>
      <author>anuarsh@ailabs.us</author>
      <pubDate>Mon, 13 Oct 2025 18:31:56 GMT</pubDate>
    </item>
    <item>
      <title>0.5.2</title>
      <link>https://pypi.org/project/ollm/0.5.2/</link>
      <description>LLM Inference for Large-Context Offline Workloads</description>
      <author>anuarsh@ailabs.us</author>
      <pubDate>Wed, 08 Oct 2025 06:06:44 GMT</pubDate>
    </item>
    <item>
      <title>0.5.0</title>
      <link>https://pypi.org/project/ollm/0.5.0/</link>
      <description>LLM Inference for Large-Context Offline Workloads</description>
      <author>anuarsh@ailabs.us</author>
      <pubDate>Wed, 01 Oct 2025 07:19:23 GMT</pubDate>
    </item>
    <item>
      <title>0.4.2</title>
      <link>https://pypi.org/project/ollm/0.4.2/</link>
      <description>LLM Inference for Large-Context Offline Workloads</description>
      <author>anuarsh@ailabs.us</author>
      <pubDate>Sun, 28 Sep 2025 05:09:34 GMT</pubDate>
    </item>
    <item>
      <title>0.4.0</title>
      <link>https://pypi.org/project/ollm/0.4.0/</link>
      <description>LLM Inference for Large-Context Offline Workloads</description>
      <author>anuarsh@ailabs.us</author>
      <pubDate>Fri, 19 Sep 2025 06:17:37 GMT</pubDate>
    </item>
    <item>
      <title>0.3.0</title>
      <link>https://pypi.org/project/ollm/0.3.0/</link>
      <description>LLM Inference for Large-Context Offline Workloads</description>
      <author>anuarsh@ailabs.us</author>
      <pubDate>Wed, 10 Sep 2025 04:09:53 GMT</pubDate>
    </item>
    <item>
      <title>0.2.1</title>
      <link>https://pypi.org/project/ollm/0.2.1/</link>
      <description>LLM Inference for Large-Context Offline Workloads</description>
      <author>anuarsh@ailabs.us</author>
      <pubDate>Thu, 04 Sep 2025 07:50:40 GMT</pubDate>
    </item>
    <item>
      <title>0.1.3</title>
      <link>https://pypi.org/project/ollm/0.1.3/</link>
      <description>LLM Inference for Large-Context Offline Workloads</description>
      <author>anuarsh@ailabs.us</author>
      <pubDate>Wed, 27 Aug 2025 05:47:21 GMT</pubDate>
    </item>
  </channel>
</rss>