<?xml version="1.0" encoding="UTF-8"?>
<!-- RSS 2.0 feed of recent PyPI releases for the "oprel" package, newest first. -->
<rss version="2.0">
  <channel>
    <title>PyPI recent updates for oprel</title>
    <link>https://pypi.org/project/oprel/</link>
    <description>Recent updates to the Python Package Index for oprel</description>
    <language>en</language>
    <item>
      <title>0.4.3</title>
      <link>https://pypi.org/project/oprel/0.4.3/</link>
      <description>[Fixed version of 0.4.2] Oprel is a high-performance Python library for running large language models locally. It provides a production-ready runtime with advanced memory management, hybrid offloading, and full multimodal support.</description>
      <author>info@skyrootsolutions.com</author>
      <pubDate>Sun, 22 Mar 2026 17:43:00 GMT</pubDate>
    </item>
    <item>
      <title>0.4.2</title>
      <link>https://pypi.org/project/oprel/0.4.2/</link>
      <description>Oprel is a high-performance Python library for running large language models locally. It provides a production-ready runtime with advanced memory management, hybrid offloading, and full multimodal support.</description>
      <author>info@skyrootsolutions.com</author>
      <pubDate>Sun, 22 Mar 2026 16:27:09 GMT</pubDate>
    </item>
    <item>
      <title>0.4.1</title>
      <link>https://pypi.org/project/oprel/0.4.1/</link>
      <description>[Fixed version of 0.4.0] Oprel is a high-performance Python library for running large language models locally. It provides a production-ready runtime with advanced memory management, hybrid offloading, and full multimodal support.</description>
      <author>info@skyrootsolutions.com</author>
      <pubDate>Fri, 13 Mar 2026 05:21:55 GMT</pubDate>
    </item>
    <item>
      <title>0.4.0</title>
      <link>https://pypi.org/project/oprel/0.4.0/</link>
      <description>Oprel is a high-performance Python library for running large language models locally. It provides a production-ready runtime with advanced memory management, hybrid offloading, and full multimodal support.</description>
      <author>info@skyrootsolutions.com</author>
      <pubDate>Fri, 13 Mar 2026 04:05:36 GMT</pubDate>
    </item>
    <item>
      <title>0.3.5</title>
      <link>https://pypi.org/project/oprel/0.3.5/</link>
      <description>Oprel is a high-performance Python library for running large language models locally. It provides a production-ready runtime with advanced memory management, hybrid offloading, and full multimodal support.</description>
      <author>info@skyrootsolutions.com</author>
      <pubDate>Sun, 15 Feb 2026 12:42:37 GMT</pubDate>
    </item>
    <item>
      <title>0.3.4</title>
      <link>https://pypi.org/project/oprel/0.3.4/</link>
      <description>Oprel is a high-performance Python library for running large language models locally. It provides a production-ready runtime with advanced memory management, hybrid offloading, and full multimodal support.</description>
      <author>info@skyrootsolutions.com</author>
      <pubDate>Sun, 15 Feb 2026 10:01:13 GMT</pubDate>
    </item>
    <item>
      <title>0.3.3</title>
      <link>https://pypi.org/project/oprel/0.3.3/</link>
      <description>Oprel is a high-performance Python library for running large language models locally. It provides a production-ready runtime with advanced memory management, hybrid offloading, and full multimodal support.</description>
      <author>info@skyrootsolutions.com</author>
      <pubDate>Mon, 09 Feb 2026 05:53:58 GMT</pubDate>
    </item>
    <item>
      <title>0.3.2</title>
      <link>https://pypi.org/project/oprel/0.3.2/</link>
      <description>Oprel is a high-performance Python library for running large language models locally. It provides a production-ready runtime with advanced memory management, hybrid offloading, and full multimodal support.</description>
      <author>info@skyrootsolutions.com</author>
      <pubDate>Sun, 08 Feb 2026 11:10:44 GMT</pubDate>
    </item>
    <item>
      <title>0.3.1</title>
      <link>https://pypi.org/project/oprel/0.3.1/</link>
      <description>Run LLMs locally with one line of Python. Ollama alternative with server mode, conversation memory, and 50+ model aliases. The SQLite of AI.</description>
      <author>info@skyrootsolutions.com</author>
      <pubDate>Sat, 07 Feb 2026 13:49:38 GMT</pubDate>
    </item>
    <item>
      <title>0.2.3</title>
      <link>https://pypi.org/project/oprel/0.2.3/</link>
      <description>Run LLMs locally with one line of Python. Ollama alternative with server mode, conversation memory, and 50+ model aliases. The SQLite of AI.</description>
      <author>tragulragul@gmail.com</author>
      <pubDate>Wed, 04 Feb 2026 09:36:00 GMT</pubDate>
    </item>
    <item>
      <title>0.2.2</title>
      <link>https://pypi.org/project/oprel/0.2.2/</link>
      <description>Run LLMs locally with one line of Python. Ollama alternative with server mode, conversation memory, and 50+ model aliases. The SQLite of AI.</description>
      <author>tragulragul@gmail.com</author>
      <pubDate>Sat, 31 Jan 2026 13:06:53 GMT</pubDate>
    </item>
    <item>
      <title>0.2.1</title>
      <link>https://pypi.org/project/oprel/0.2.1/</link>
      <description>Run LLMs locally with one line of Python. Ollama alternative with server mode, conversation memory, and 50+ model aliases. The SQLite of AI.</description>
      <author>tragulragul@gmail.com</author>
      <pubDate>Mon, 26 Jan 2026 04:38:25 GMT</pubDate>
    </item>
    <item>
      <title>0.2.0</title>
      <link>https://pypi.org/project/oprel/0.2.0/</link>
      <description>Run LLMs locally with one line of Python. Ollama alternative with server mode, conversation memory, and 50+ model aliases. The SQLite of AI.</description>
      <author>tragulragul@gmail.com</author>
      <pubDate>Sun, 25 Jan 2026 13:25:48 GMT</pubDate>
    </item>
    <item>
      <title>0.1.3</title>
      <link>https://pypi.org/project/oprel/0.1.3/</link>
      <description>Local-first AI runtime - The SQLite of LLMs</description>
      <author>tragulragul@gmail.com</author>
      <pubDate>Sat, 24 Jan 2026 19:41:04 GMT</pubDate>
    </item>
    <item>
      <title>0.1.2</title>
      <link>https://pypi.org/project/oprel/0.1.2/</link>
      <description>Local-first AI runtime - The SQLite of LLMs</description>
      <author>tragulragul@gmail.com</author>
      <pubDate>Sat, 24 Jan 2026 19:21:23 GMT</pubDate>
    </item>
    <item>
      <title>0.1.1</title>
      <link>https://pypi.org/project/oprel/0.1.1/</link>
      <description>Local-first AI runtime - The SQLite of LLMs</description>
      <author>tragulragul@gmail.com</author>
      <pubDate>Sat, 24 Jan 2026 18:57:43 GMT</pubDate>
    </item>
    <item>
      <title>0.1.0</title>
      <link>https://pypi.org/project/oprel/0.1.0/</link>
      <description>Local-first AI runtime - The SQLite of LLMs</description>
      <!-- NOTE(review): "you@example.com" is an unedited packaging-template placeholder in the 0.1.0 release metadata; kept verbatim since it reflects the published data. -->
      <author>you@example.com</author>
      <pubDate>Sat, 24 Jan 2026 18:29:33 GMT</pubDate>
    </item>
  </channel>
</rss>