<?xml version="1.0" encoding="UTF-8"?>
<rss version="2.0">
  <channel>
    <title>PyPI recent updates for vllm-mcp-server</title>
    <link>https://pypi.org/project/vllm-mcp-server/</link>
    <description>Recent updates to the Python Package Index for vllm-mcp-server</description>
    <language>en</language>
    <item>
      <title>0.1.4</title>
      <link>https://pypi.org/project/vllm-mcp-server/0.1.4/</link>
      <description>MCP server for vLLM - expose vLLM capabilities to AI assistants</description>
      <pubDate>Sat, 03 Jan 2026 02:12:55 GMT</pubDate>
    </item>
    <item>
      <title>0.1.3</title>
      <link>https://pypi.org/project/vllm-mcp-server/0.1.3/</link>
      <description>MCP server for vLLM - expose vLLM capabilities to AI assistants</description>
      <pubDate>Tue, 09 Dec 2025 19:01:09 GMT</pubDate>
    </item>
    <item>
      <title>0.1.2</title>
      <link>https://pypi.org/project/vllm-mcp-server/0.1.2/</link>
      <description>MCP server for vLLM - expose vLLM capabilities to AI assistants</description>
      <pubDate>Tue, 09 Dec 2025 03:00:39 GMT</pubDate>
    </item>
    <item>
      <title>0.1.1</title>
      <link>https://pypi.org/project/vllm-mcp-server/0.1.1/</link>
      <description>MCP server for vLLM - expose vLLM capabilities to AI assistants</description>
      <pubDate>Mon, 08 Dec 2025 19:30:48 GMT</pubDate>
    </item>
  </channel>
</rss>