<?xml version="1.0" encoding="UTF-8"?>
<rss version="2.0">
  <channel>
    <title>PyPI recent updates for lm-proxy</title>
    <link>https://pypi.org/project/lm-proxy/</link>
    <description>Recent updates to the Python Package Index for lm-proxy</description>
    <language>en</language>
    <item>
      <title>3.2.2</title>
      <link>https://pypi.org/project/lm-proxy/3.2.2/</link>
      <description>LM-Proxy is an OpenAI-compatible http proxy server for inferencing various LLMs capable of working with Google, Anthropic, OpenAI APIs, local PyTorch inference, etc.</description>
      <author>mail@vitaliy.in</author>
      <pubDate>Thu, 02 Apr 2026 13:46:25 GMT</pubDate>
    </item>
    <item>
      <title>3.2.1</title>
      <link>https://pypi.org/project/lm-proxy/3.2.1/</link>
      <description>LM-Proxy is an OpenAI-compatible http proxy server for inferencing various LLMs capable of working with Google, Anthropic, OpenAI APIs, local PyTorch inference, etc.</description>
      <author>mail@vitaliy.in</author>
      <pubDate>Tue, 31 Mar 2026 19:24:38 GMT</pubDate>
    </item>
    <item>
      <title>3.2.0</title>
      <link>https://pypi.org/project/lm-proxy/3.2.0/</link>
      <description>LM-Proxy is an OpenAI-compatible http proxy server for inferencing various LLMs capable of working with Google, Anthropic, OpenAI APIs, local PyTorch inference, etc.</description>
      <author>mail@vitaliy.in</author>
      <pubDate>Mon, 30 Mar 2026 17:04:35 GMT</pubDate>
    </item>
    <item>
      <title>3.1.0</title>
      <link>https://pypi.org/project/lm-proxy/3.1.0/</link>
      <description>LM-Proxy is an OpenAI-compatible http proxy server for inferencing various LLMs capable of working with Google, Anthropic, OpenAI APIs, local PyTorch inference, etc.</description>
      <author>mail@vitaliy.in</author>
      <pubDate>Wed, 25 Mar 2026 18:28:28 GMT</pubDate>
    </item>
    <item>
      <title>3.0.2</title>
      <link>https://pypi.org/project/lm-proxy/3.0.2/</link>
      <description>LM-Proxy is an OpenAI-compatible http proxy server for inferencing various LLMs capable of working with Google, Anthropic, OpenAI APIs, local PyTorch inference, etc.</description>
      <author>mail@vitaliy.in</author>
      <pubDate>Thu, 19 Feb 2026 13:45:46 GMT</pubDate>
    </item>
    <item>
      <title>3.0.1</title>
      <link>https://pypi.org/project/lm-proxy/3.0.1/</link>
      <description>LM-Proxy is an OpenAI-compatible http proxy server for inferencing various LLMs capable of working with Google, Anthropic, OpenAI APIs, local PyTorch inference, etc.</description>
      <author>mail@vitaliy.in</author>
      <pubDate>Tue, 10 Feb 2026 20:42:57 GMT</pubDate>
    </item>
    <item>
      <title>3.0.0</title>
      <link>https://pypi.org/project/lm-proxy/3.0.0/</link>
      <description>LM-Proxy is an OpenAI-compatible http proxy server for inferencing various LLMs capable of working with Google, Anthropic, OpenAI APIs, local PyTorch inference, etc.</description>
      <author>mail@vitaliy.in</author>
      <pubDate>Thu, 05 Feb 2026 11:32:47 GMT</pubDate>
    </item>
    <item>
      <title>3.0.0.dev1</title>
      <link>https://pypi.org/project/lm-proxy/3.0.0.dev1/</link>
      <description>LM-Proxy is an OpenAI-compatible http proxy server for inferencing various LLMs capable of working with Google, Anthropic, OpenAI APIs, local PyTorch inference, etc.</description>
      <author>mail@vitaliy.in</author>
      <pubDate>Tue, 20 Jan 2026 16:52:02 GMT</pubDate>
    </item>
    <item>
      <title>2.1.1</title>
      <link>https://pypi.org/project/lm-proxy/2.1.1/</link>
      <description>&#34;LM-Proxy&#34; is OpenAI-compatible http proxy server for inferencing various LLMs capable of working with Google, Anthropic, OpenAI APIs, local PyTorch inference, etc.</description>
      <author>mail@vitalii.in</author>
      <pubDate>Thu, 20 Nov 2025 12:56:23 GMT</pubDate>
    </item>
    <item>
      <title>2.1.0</title>
      <link>https://pypi.org/project/lm-proxy/2.1.0/</link>
      <description>&#34;LM-Proxy&#34; is OpenAI-compatible http proxy server for inferencing various LLMs capable of working with Google, Anthropic, OpenAI APIs, local PyTorch inference, etc.</description>
      <author>mail@vitalii.in</author>
      <pubDate>Sun, 02 Nov 2025 22:55:28 GMT</pubDate>
    </item>
    <item>
      <title>2.0.0</title>
      <link>https://pypi.org/project/lm-proxy/2.0.0/</link>
      <description>&#34;LM-Proxy&#34; is OpenAI-compatible http proxy server for inferencing various LLMs capable of working with Google, Anthropic, OpenAI APIs, local PyTorch inference, etc.</description>
      <author>mail@vitalii.in</author>
      <pubDate>Sun, 26 Oct 2025 17:36:28 GMT</pubDate>
    </item>
    <item>
      <title>1.1.0</title>
      <link>https://pypi.org/project/lm-proxy/1.1.0/</link>
      <description>&#34;LM-Proxy&#34; is OpenAI-compatible http proxy server for inferencing various LLMs capable of working with Google, Anthropic, OpenAI APIs, local PyTorch inference, etc.</description>
      <author>mail@vitalii.in</author>
      <pubDate>Wed, 15 Oct 2025 15:47:39 GMT</pubDate>
    </item>
    <item>
      <title>1.0.0</title>
      <link>https://pypi.org/project/lm-proxy/1.0.0/</link>
      <description>&#34;LM-Proxy&#34; is OpenAI-compatible http proxy server for inferencing various LLMs capable of working with Google, Anthropic, OpenAI APIs, local PyTorch inference, etc.</description>
      <author>mail@vitalii.in</author>
      <pubDate>Wed, 15 Oct 2025 12:01:57 GMT</pubDate>
    </item>
    <item>
      <title>0.4.0</title>
      <link>https://pypi.org/project/lm-proxy/0.4.0/</link>
      <description>&#34;LM-Proxy&#34; is OpenAI-compatible http proxy server for inferencing various LLMs capable of working with Google, Anthropic, OpenAI APIs, local PyTorch inference, etc.</description>
      <author>mail@vitalii.in</author>
      <pubDate>Tue, 14 Oct 2025 19:51:22 GMT</pubDate>
    </item>
    <item>
      <title>0.3.0</title>
      <link>https://pypi.org/project/lm-proxy/0.3.0/</link>
      <description>&#34;LM-Proxy&#34; is OpenAI-compatible http proxy server for inferencing various LLMs capable of working with Google, Anthropic, OpenAI APIs, local PyTorch inference, etc.</description>
      <author>mail@vitalii.in</author>
      <pubDate>Thu, 09 Oct 2025 18:09:24 GMT</pubDate>
    </item>
    <item>
      <title>0.2.2</title>
      <link>https://pypi.org/project/lm-proxy/0.2.2/</link>
      <description>&#34;LM-Proxy&#34; is OpenAI-compatible http proxy server for inferencing various LLMs capable of working with Google, Anthropic, OpenAI APIs, local PyTorch inference, etc.</description>
      <author>mail@vitalii.in</author>
      <pubDate>Wed, 08 Oct 2025 14:48:49 GMT</pubDate>
    </item>
    <item>
      <title>0.2.1</title>
      <link>https://pypi.org/project/lm-proxy/0.2.1/</link>
      <description>&#34;LM-Proxy&#34; is OpenAI-compatible http proxy server for inferencing various LLMs capable of working with Google, Anthropic, OpenAI APIs, local PyTorch inference, etc.</description>
      <author>mail@vitalii.in</author>
      <pubDate>Thu, 28 Aug 2025 00:32:57 GMT</pubDate>
    </item>
    <item>
      <title>0.2.0</title>
      <link>https://pypi.org/project/lm-proxy/0.2.0/</link>
      <description>&#34;LM-Proxy&#34; is OpenAI-compatible http proxy server for inferencing various LLMs capable of working with Google, Anthropic, OpenAI APIs, local PyTorch inference, etc.</description>
      <author>mail@vitalii.in</author>
      <pubDate>Wed, 27 Aug 2025 17:48:11 GMT</pubDate>
    </item>
    <item>
      <title>0.0.3</title>
      <link>https://pypi.org/project/lm-proxy/0.0.3/</link>
      <description>&#34;LM-Proxy&#34; is OpenAI-compatible http proxy server for inferencing various LLMs capable of working with Google, Anthropic, OpenAI APIs, local PyTorch inference, etc.</description>
      <author>mail@vitalii.in</author>
      <pubDate>Sat, 24 May 2025 17:26:04 GMT</pubDate>
    </item>
    <item>
      <title>0.0.2</title>
      <link>https://pypi.org/project/lm-proxy/0.0.2/</link>
      <description>LLM inference proxy server</description>
      <author>mail@vitalii.in</author>
      <pubDate>Sat, 24 May 2025 16:37:17 GMT</pubDate>
    </item>
    <item>
      <title>0.0.1</title>
      <link>https://pypi.org/project/lm-proxy/0.0.1/</link>
      <description>LLM inference proxy server</description>
      <author>mail@vitalii.in</author>
      <pubDate>Sat, 24 May 2025 16:06:02 GMT</pubDate>
    </item>
  </channel>
</rss>