<?xml version="1.0" encoding="UTF-8"?>
<rss version="2.0">
  <channel>
    <title>PyPI recent updates for llm-proxy-server</title>
    <link>https://pypi.org/project/llm-proxy-server/</link>
    <description>Recent updates to the Python Package Index for llm-proxy-server</description>
    <language>en</language>
    <item>
      <title>3.2.2</title>
      <link>https://pypi.org/project/llm-proxy-server/3.2.2/</link>
      <description>LLM Proxy Server is an OpenAI-compatible http proxy server for inferencing various LLMs capable of working with Google, Anthropic, OpenAI APIs, local PyTorch inference, etc.</description>
      <author>mail@vitaliy.in</author>
      <pubDate>Thu, 02 Apr 2026 13:46:23 GMT</pubDate>
    </item>
    <item>
      <title>3.2.1</title>
      <link>https://pypi.org/project/llm-proxy-server/3.2.1/</link>
      <description>LLM Proxy Server is an OpenAI-compatible http proxy server for inferencing various LLMs capable of working with Google, Anthropic, OpenAI APIs, local PyTorch inference, etc.</description>
      <author>mail@vitaliy.in</author>
      <pubDate>Tue, 31 Mar 2026 19:24:35 GMT</pubDate>
    </item>
    <item>
      <title>3.2.0</title>
      <link>https://pypi.org/project/llm-proxy-server/3.2.0/</link>
      <description>LLM Proxy Server is an OpenAI-compatible http proxy server for inferencing various LLMs capable of working with Google, Anthropic, OpenAI APIs, local PyTorch inference, etc.</description>
      <author>mail@vitaliy.in</author>
      <pubDate>Mon, 30 Mar 2026 17:04:32 GMT</pubDate>
    </item>
    <item>
      <title>3.1.0</title>
      <link>https://pypi.org/project/llm-proxy-server/3.1.0/</link>
      <description>LLM Proxy Server is an OpenAI-compatible http proxy server for inferencing various LLMs capable of working with Google, Anthropic, OpenAI APIs, local PyTorch inference, etc.</description>
      <author>mail@vitaliy.in</author>
      <pubDate>Wed, 25 Mar 2026 18:28:27 GMT</pubDate>
    </item>
    <item>
      <title>3.0.2</title>
      <link>https://pypi.org/project/llm-proxy-server/3.0.2/</link>
      <description>LLM Proxy Server is an OpenAI-compatible http proxy server for inferencing various LLMs capable of working with Google, Anthropic, OpenAI APIs, local PyTorch inference, etc.</description>
      <author>mail@vitaliy.in</author>
      <pubDate>Thu, 19 Feb 2026 13:45:44 GMT</pubDate>
    </item>
    <item>
      <title>3.0.1</title>
      <link>https://pypi.org/project/llm-proxy-server/3.0.1/</link>
      <description>LLM Proxy Server is an OpenAI-compatible http proxy server for inferencing various LLMs capable of working with Google, Anthropic, OpenAI APIs, local PyTorch inference, etc.</description>
      <author>mail@vitaliy.in</author>
      <pubDate>Tue, 10 Feb 2026 20:42:56 GMT</pubDate>
    </item>
    <item>
      <title>3.0.0</title>
      <link>https://pypi.org/project/llm-proxy-server/3.0.0/</link>
      <description>LLM Proxy Server is an OpenAI-compatible http proxy server for inferencing various LLMs capable of working with Google, Anthropic, OpenAI APIs, local PyTorch inference, etc.</description>
      <author>mail@vitaliy.in</author>
      <pubDate>Thu, 05 Feb 2026 11:32:43 GMT</pubDate>
    </item>
    <item>
      <title>3.0.0.dev1</title>
      <link>https://pypi.org/project/llm-proxy-server/3.0.0.dev1/</link>
      <description>LLM Proxy Server is an OpenAI-compatible http proxy server for inferencing various LLMs capable of working with Google, Anthropic, OpenAI APIs, local PyTorch inference, etc.</description>
      <author>mail@vitaliy.in</author>
      <pubDate>Tue, 20 Jan 2026 16:51:59 GMT</pubDate>
    </item>
    <item>
      <title>2.1.1</title>
      <link>https://pypi.org/project/llm-proxy-server/2.1.1/</link>
      <description>&#34;LLM Proxy Server&#34; is OpenAI-compatible http proxy server for inferencing various LLMs capable of working with Google, Anthropic, OpenAI APIs, local PyTorch inference, etc.</description>
      <author>mail@vitalii.in</author>
      <pubDate>Thu, 20 Nov 2025 12:56:20 GMT</pubDate>
    </item>
    <item>
      <title>2.1.0</title>
      <link>https://pypi.org/project/llm-proxy-server/2.1.0/</link>
      <description>&#34;LLM Proxy Server&#34; is OpenAI-compatible http proxy server for inferencing various LLMs capable of working with Google, Anthropic, OpenAI APIs, local PyTorch inference, etc.</description>
      <author>mail@vitalii.in</author>
      <pubDate>Sun, 02 Nov 2025 22:55:25 GMT</pubDate>
    </item>
    <item>
      <title>2.0.0</title>
      <link>https://pypi.org/project/llm-proxy-server/2.0.0/</link>
      <description>&#34;LLM Proxy Server&#34; is OpenAI-compatible http proxy server for inferencing various LLMs capable of working with Google, Anthropic, OpenAI APIs, local PyTorch inference, etc.</description>
      <author>mail@vitalii.in</author>
      <pubDate>Sun, 26 Oct 2025 17:36:25 GMT</pubDate>
    </item>
    <item>
      <title>1.1.0</title>
      <link>https://pypi.org/project/llm-proxy-server/1.1.0/</link>
      <description>&#34;LLM Proxy Server&#34; is OpenAI-compatible http proxy server for inferencing various LLMs capable of working with Google, Anthropic, OpenAI APIs, local PyTorch inference, etc.</description>
      <author>mail@vitalii.in</author>
      <pubDate>Wed, 15 Oct 2025 15:47:37 GMT</pubDate>
    </item>
    <item>
      <title>1.0.0</title>
      <link>https://pypi.org/project/llm-proxy-server/1.0.0/</link>
      <description>&#34;LLM Proxy Server&#34; is OpenAI-compatible http proxy server for inferencing various LLMs capable of working with Google, Anthropic, OpenAI APIs, local PyTorch inference, etc.</description>
      <author>mail@vitalii.in</author>
      <pubDate>Wed, 15 Oct 2025 12:01:55 GMT</pubDate>
    </item>
    <item>
      <title>0.4.0</title>
      <link>https://pypi.org/project/llm-proxy-server/0.4.0/</link>
      <description>&#34;LLM Proxy Server&#34; is OpenAI-compatible http proxy server for inferencing various LLMs capable of working with Google, Anthropic, OpenAI APIs, local PyTorch inference, etc.</description>
      <author>mail@vitalii.in</author>
      <pubDate>Tue, 14 Oct 2025 19:51:21 GMT</pubDate>
    </item>
    <item>
      <title>0.3.0</title>
      <link>https://pypi.org/project/llm-proxy-server/0.3.0/</link>
      <description>&#34;LLM Proxy Server&#34; is OpenAI-compatible http proxy server for inferencing various LLMs capable of working with Google, Anthropic, OpenAI APIs, local PyTorch inference, etc.</description>
      <author>mail@vitalii.in</author>
      <pubDate>Thu, 09 Oct 2025 18:09:23 GMT</pubDate>
    </item>
    <item>
      <title>0.2.2</title>
      <link>https://pypi.org/project/llm-proxy-server/0.2.2/</link>
      <description>&#34;LLM Proxy Server&#34; is OpenAI-compatible http proxy server for inferencing various LLMs capable of working with Google, Anthropic, OpenAI APIs, local PyTorch inference, etc.</description>
      <author>mail@vitalii.in</author>
      <pubDate>Wed, 08 Oct 2025 14:48:48 GMT</pubDate>
    </item>
    <item>
      <title>0.2.1</title>
      <link>https://pypi.org/project/llm-proxy-server/0.2.1/</link>
      <description>&#34;LLM Proxy Server&#34; is OpenAI-compatible http proxy server for inferencing various LLMs capable of working with Google, Anthropic, OpenAI APIs, local PyTorch inference, etc.</description>
      <author>mail@vitalii.in</author>
      <pubDate>Thu, 28 Aug 2025 00:32:55 GMT</pubDate>
    </item>
    <item>
      <title>0.2.0</title>
      <link>https://pypi.org/project/llm-proxy-server/0.2.0/</link>
      <description>&#34;LLM Proxy Server&#34; is OpenAI-compatible http proxy server for inferencing various LLMs capable of working with Google, Anthropic, OpenAI APIs, local PyTorch inference, etc.</description>
      <author>mail@vitalii.in</author>
      <pubDate>Wed, 27 Aug 2025 17:48:09 GMT</pubDate>
    </item>
    <item>
      <title>0.0.3</title>
      <link>https://pypi.org/project/llm-proxy-server/0.0.3/</link>
      <description>&#34;LLM Proxy Server&#34; is OpenAI-compatible http proxy server for inferencing various LLMs capable of working with Google, Anthropic, OpenAI APIs, local PyTorch inference, etc.</description>
      <author>mail@vitalii.in</author>
      <pubDate>Sat, 24 May 2025 17:26:01 GMT</pubDate>
    </item>
    <item>
      <title>0.0.2</title>
      <link>https://pypi.org/project/llm-proxy-server/0.0.2/</link>
      <description>LLM inference proxy server</description>
      <author>mail@vitalii.in</author>
      <pubDate>Sat, 24 May 2025 16:37:16 GMT</pubDate>
    </item>
    <item>
      <title>0.0.1</title>
      <link>https://pypi.org/project/llm-proxy-server/0.0.1/</link>
      <description>LLM inference proxy server</description>
      <author>mail@vitalii.in</author>
      <pubDate>Sat, 24 May 2025 16:08:45 GMT</pubDate>
    </item>
  </channel>
</rss>