<?xml version="1.0" encoding="UTF-8"?>
<rss version="2.0">
  <channel>
    <title>PyPI recent updates for mlx-openai-server</title>
    <link>https://pypi.org/project/mlx-openai-server/</link>
    <description>Recent updates to the Python Package Index for mlx-openai-server</description>
    <language>en</language>
    <item>
      <title>1.7.1</title>
      <link>https://pypi.org/project/mlx-openai-server/1.7.1/</link>
      <description>A high-performance API server that provides OpenAI-compatible endpoints for MLX models.</description>
      <author>cubist38@gmail.com</author>
      <pubDate>Sat, 04 Apr 2026 07:36:13 GMT</pubDate>
    </item>
    <item>
      <title>1.7.0</title>
      <link>https://pypi.org/project/mlx-openai-server/1.7.0/</link>
      <description>A high-performance API server that provides OpenAI-compatible endpoints for MLX models.</description>
      <author>cubist38@gmail.com</author>
      <pubDate>Sun, 22 Mar 2026 08:42:47 GMT</pubDate>
    </item>
    <item>
      <title>1.6.3</title>
      <link>https://pypi.org/project/mlx-openai-server/1.6.3/</link>
      <description>A high-performance API server that provides OpenAI-compatible endpoints for MLX models.</description>
      <author>cubist38@gmail.com</author>
      <pubDate>Sun, 08 Mar 2026 10:20:42 GMT</pubDate>
    </item>
    <item>
      <title>1.6.2</title>
      <link>https://pypi.org/project/mlx-openai-server/1.6.2/</link>
      <description>A high-performance API server that provides OpenAI-compatible endpoints for MLX models.</description>
      <author>cubist38@gmail.com</author>
      <pubDate>Fri, 06 Mar 2026 17:10:11 GMT</pubDate>
    </item>
    <item>
      <title>1.6.1</title>
      <link>https://pypi.org/project/mlx-openai-server/1.6.1/</link>
      <description>A high-performance API server that provides OpenAI-compatible endpoints for MLX models.</description>
      <author>cubist38@gmail.com</author>
      <pubDate>Mon, 23 Feb 2026 00:36:14 GMT</pubDate>
    </item>
    <item>
      <title>1.6.0</title>
      <link>https://pypi.org/project/mlx-openai-server/1.6.0/</link>
      <description>A high-performance API server that provides OpenAI-compatible endpoints for MLX models.</description>
      <author>cubist38@gmail.com</author>
      <pubDate>Sun, 22 Feb 2026 10:07:46 GMT</pubDate>
    </item>
    <item>
      <title>1.5.3</title>
      <link>https://pypi.org/project/mlx-openai-server/1.5.3/</link>
      <description>A high-performance API server that provides OpenAI-compatible endpoints for MLX models.</description>
      <author>cubist38@gmail.com</author>
      <pubDate>Thu, 12 Feb 2026 02:28:42 GMT</pubDate>
    </item>
    <item>
      <title>1.5.2</title>
      <link>https://pypi.org/project/mlx-openai-server/1.5.2/</link>
      <description>A high-performance API server that provides OpenAI-compatible endpoints for MLX models.</description>
      <author>cubist38@gmail.com</author>
      <pubDate>Sat, 07 Feb 2026 14:00:56 GMT</pubDate>
    </item>
    <item>
      <title>1.5.1</title>
      <link>https://pypi.org/project/mlx-openai-server/1.5.1/</link>
      <description>A high-performance API server that provides OpenAI-compatible endpoints for MLX models.</description>
      <author>cubist38@gmail.com</author>
      <pubDate>Tue, 27 Jan 2026 03:31:42 GMT</pubDate>
    </item>
    <item>
      <title>1.5.0</title>
      <link>https://pypi.org/project/mlx-openai-server/1.5.0/</link>
      <description>A high-performance API server that provides OpenAI-compatible endpoints for MLX models.</description>
      <author>cubist38@gmail.com</author>
      <pubDate>Thu, 15 Jan 2026 03:42:58 GMT</pubDate>
    </item>
    <item>
      <title>1.4.2</title>
      <link>https://pypi.org/project/mlx-openai-server/1.4.2/</link>
      <description>A high-performance API server that provides OpenAI-compatible endpoints for MLX models.</description>
      <author>cubist38@gmail.com</author>
      <pubDate>Tue, 09 Dec 2025 04:25:25 GMT</pubDate>
    </item>
    <item>
      <title>1.4.1</title>
      <link>https://pypi.org/project/mlx-openai-server/1.4.1/</link>
      <description>A high-performance API server that provides OpenAI-compatible endpoints for MLX models.</description>
      <author>cubist38@gmail.com</author>
      <pubDate>Fri, 05 Dec 2025 08:11:24 GMT</pubDate>
    </item>
    <item>
      <title>1.4.0</title>
      <link>https://pypi.org/project/mlx-openai-server/1.4.0/</link>
      <description>A high-performance API server that provides OpenAI-compatible endpoints for MLX models.</description>
      <author>cubist38@gmail.com</author>
      <pubDate>Fri, 05 Dec 2025 06:55:32 GMT</pubDate>
    </item>
    <item>
      <title>1.3.12</title>
      <link>https://pypi.org/project/mlx-openai-server/1.3.12/</link>
      <description>A high-performance API server that provides OpenAI-compatible endpoints for MLX models. Built with Python and FastAPI, it enables efficient, scalable, and user-friendly local deployment of MLX-based multimodal models with an OpenAI-compatible interface. Supports text, vision, and audio processing capabilities. Perfect for developers looking to run MLX models locally while maintaining compatibility with existing OpenAI-based applications.</description>
      <author>cubist38@gmail.com</author>
      <pubDate>Mon, 20 Oct 2025 07:22:35 GMT</pubDate>
    </item>
    <item>
      <title>1.3.11</title>
      <link>https://pypi.org/project/mlx-openai-server/1.3.11/</link>
      <description>A high-performance API server that provides OpenAI-compatible endpoints for MLX models. Built with Python and FastAPI, it enables efficient, scalable, and user-friendly local deployment of MLX-based multimodal models with an OpenAI-compatible interface. Supports text, vision, and audio processing capabilities. Perfect for developers looking to run MLX models locally while maintaining compatibility with existing OpenAI-based applications.</description>
      <author>cubist38@gmail.com</author>
      <pubDate>Sun, 19 Oct 2025 09:12:18 GMT</pubDate>
    </item>
    <item>
      <title>1.3.10</title>
      <link>https://pypi.org/project/mlx-openai-server/1.3.10/</link>
      <description>A high-performance API server that provides OpenAI-compatible endpoints for MLX models. Built with Python and FastAPI, it enables efficient, scalable, and user-friendly local deployment of MLX-based multimodal models with an OpenAI-compatible interface. Supports text, vision, and audio processing capabilities. Perfect for developers looking to run MLX models locally while maintaining compatibility with existing OpenAI-based applications.</description>
      <author>cubist38@gmail.com</author>
      <pubDate>Sun, 19 Oct 2025 09:10:26 GMT</pubDate>
    </item>
    <item>
      <title>1.3.9</title>
      <link>https://pypi.org/project/mlx-openai-server/1.3.9/</link>
      <description>A high-performance API server that provides OpenAI-compatible endpoints for MLX models. Built with Python and FastAPI, it enables efficient, scalable, and user-friendly local deployment of MLX-based multimodal models with an OpenAI-compatible interface. Supports text, vision, and audio processing capabilities. Perfect for developers looking to run MLX models locally while maintaining compatibility with existing OpenAI-based applications.</description>
      <author>cubist38@gmail.com</author>
      <pubDate>Wed, 15 Oct 2025 02:10:52 GMT</pubDate>
    </item>
    <item>
      <title>1.3.8</title>
      <link>https://pypi.org/project/mlx-openai-server/1.3.8/</link>
      <description>A high-performance API server that provides OpenAI-compatible endpoints for MLX models. Built with Python and FastAPI, it enables efficient, scalable, and user-friendly local deployment of MLX-based multimodal models with an OpenAI-compatible interface. Supports text, vision, and audio processing capabilities. Perfect for developers looking to run MLX models locally while maintaining compatibility with existing OpenAI-based applications.</description>
      <author>cubist38@gmail.com</author>
      <pubDate>Sat, 11 Oct 2025 10:15:05 GMT</pubDate>
    </item>
    <item>
      <title>1.3.7</title>
      <link>https://pypi.org/project/mlx-openai-server/1.3.7/</link>
      <description>A high-performance API server that provides OpenAI-compatible endpoints for MLX models. Built with Python and FastAPI, it enables efficient, scalable, and user-friendly local deployment of MLX-based multimodal models with an OpenAI-compatible interface. Supports text, vision, and audio processing capabilities. Perfect for developers looking to run MLX models locally while maintaining compatibility with existing OpenAI-based applications.</description>
      <author>cubist38@gmail.com</author>
      <pubDate>Tue, 07 Oct 2025 03:17:03 GMT</pubDate>
    </item>
    <item>
      <title>1.3.6</title>
      <link>https://pypi.org/project/mlx-openai-server/1.3.6/</link>
      <description>A high-performance API server that provides OpenAI-compatible endpoints for MLX models. Built with Python and FastAPI, it enables efficient, scalable, and user-friendly local deployment of MLX-based multimodal models with an OpenAI-compatible interface. Supports text, vision, and audio processing capabilities. Perfect for developers looking to run MLX models locally while maintaining compatibility with existing OpenAI-based applications.</description>
      <author>cubist38@gmail.com</author>
      <pubDate>Mon, 06 Oct 2025 07:21:53 GMT</pubDate>
    </item>
    <item>
      <title>1.3.5</title>
      <link>https://pypi.org/project/mlx-openai-server/1.3.5/</link>
      <description>A high-performance API server that provides OpenAI-compatible endpoints for MLX models. Built with Python and FastAPI, it enables efficient, scalable, and user-friendly local deployment of MLX-based multimodal models with an OpenAI-compatible interface. Supports text, vision, and audio processing capabilities. Perfect for developers looking to run MLX models locally while maintaining compatibility with existing OpenAI-based applications.</description>
      <author>cubist38@gmail.com</author>
      <pubDate>Fri, 03 Oct 2025 09:18:10 GMT</pubDate>
    </item>
    <item>
      <title>1.3.4</title>
      <link>https://pypi.org/project/mlx-openai-server/1.3.4/</link>
      <description>A high-performance API server that provides OpenAI-compatible endpoints for MLX models. Built with Python and FastAPI, it enables efficient, scalable, and user-friendly local deployment of MLX-based multimodal models with an OpenAI-compatible interface. Supports text, vision, and audio processing capabilities. Perfect for developers looking to run MLX models locally while maintaining compatibility with existing OpenAI-based applications.</description>
      <author>cubist38@gmail.com</author>
      <pubDate>Thu, 18 Sep 2025 02:13:30 GMT</pubDate>
    </item>
    <item>
      <title>1.3.3</title>
      <link>https://pypi.org/project/mlx-openai-server/1.3.3/</link>
      <description>A high-performance API server that provides OpenAI-compatible endpoints for MLX models. Built with Python and FastAPI, it enables efficient, scalable, and user-friendly local deployment of MLX-based multimodal models with an OpenAI-compatible interface. Supports text, vision, and audio processing capabilities. Perfect for developers looking to run MLX models locally while maintaining compatibility with existing OpenAI-based applications.</description>
      <author>cubist38@gmail.com</author>
      <pubDate>Sat, 13 Sep 2025 12:09:11 GMT</pubDate>
    </item>
    <item>
      <title>1.3.2</title>
      <link>https://pypi.org/project/mlx-openai-server/1.3.2/</link>
      <description>A high-performance API server that provides OpenAI-compatible endpoints for MLX models. Built with Python and FastAPI, it enables efficient, scalable, and user-friendly local deployment of MLX-based multimodal models with an OpenAI-compatible interface. Supports text, vision, and audio processing capabilities. Perfect for developers looking to run MLX models locally while maintaining compatibility with existing OpenAI-based applications.</description>
      <author>cubist38@gmail.com</author>
      <pubDate>Mon, 08 Sep 2025 02:20:37 GMT</pubDate>
    </item>
    <item>
      <title>1.3.1</title>
      <link>https://pypi.org/project/mlx-openai-server/1.3.1/</link>
      <description>A high-performance API server that provides OpenAI-compatible endpoints for MLX models. Built with Python and FastAPI, it enables efficient, scalable, and user-friendly local deployment of MLX-based multimodal models with an OpenAI-compatible interface. Supports text, vision, and audio processing capabilities. Perfect for developers looking to run MLX models locally while maintaining compatibility with existing OpenAI-based applications.</description>
      <author>cubist38@gmail.com</author>
      <pubDate>Tue, 02 Sep 2025 07:27:53 GMT</pubDate>
    </item>
    <item>
      <title>1.3.0</title>
      <link>https://pypi.org/project/mlx-openai-server/1.3.0/</link>
      <description>A high-performance API server that provides OpenAI-compatible endpoints for MLX models. Built with Python and FastAPI, it enables efficient, scalable, and user-friendly local deployment of MLX-based multimodal models with an OpenAI-compatible interface. Supports text, vision, and audio processing capabilities. Perfect for developers looking to run MLX models locally while maintaining compatibility with existing OpenAI-based applications.</description>
      <author>cubist38@gmail.com</author>
      <pubDate>Tue, 02 Sep 2025 06:44:50 GMT</pubDate>
    </item>
    <item>
      <title>1.2.19</title>
      <link>https://pypi.org/project/mlx-openai-server/1.2.19/</link>
      <description>A high-performance API server that provides OpenAI-compatible endpoints for MLX models. Built with Python and FastAPI, it enables efficient, scalable, and user-friendly local deployment of MLX-based multimodal models with an OpenAI-compatible interface. Supports text, vision, and audio processing capabilities. Perfect for developers looking to run MLX models locally while maintaining compatibility with existing OpenAI-based applications.</description>
      <author>cubist38@gmail.com</author>
      <pubDate>Mon, 18 Aug 2025 09:52:46 GMT</pubDate>
    </item>
    <item>
      <title>1.2.18</title>
      <link>https://pypi.org/project/mlx-openai-server/1.2.18/</link>
      <description>A high-performance API server that provides OpenAI-compatible endpoints for MLX models. Built with Python and FastAPI, it enables efficient, scalable, and user-friendly local deployment of MLX-based multimodal models with an OpenAI-compatible interface. Supports text, vision, and audio processing capabilities. Perfect for developers looking to run MLX models locally while maintaining compatibility with existing OpenAI-based applications.</description>
      <author>cubist38@gmail.com</author>
      <pubDate>Sun, 17 Aug 2025 04:12:24 GMT</pubDate>
    </item>
    <item>
      <title>1.2.17</title>
      <link>https://pypi.org/project/mlx-openai-server/1.2.17/</link>
      <description>A high-performance API server that provides OpenAI-compatible endpoints for MLX models. Built with Python and FastAPI, it enables efficient, scalable, and user-friendly local deployment of MLX-based multimodal models with an OpenAI-compatible interface. Supports text, vision, and audio processing capabilities. Perfect for developers looking to run MLX models locally while maintaining compatibility with existing OpenAI-based applications.</description>
      <author>cubist38@gmail.com</author>
      <pubDate>Sat, 16 Aug 2025 09:08:12 GMT</pubDate>
    </item>
    <item>
      <title>1.2.16</title>
      <link>https://pypi.org/project/mlx-openai-server/1.2.16/</link>
      <description>A high-performance API server that provides OpenAI-compatible endpoints for MLX models. Built with Python and FastAPI, it enables efficient, scalable, and user-friendly local deployment of MLX-based multimodal models with an OpenAI-compatible interface. Supports text, vision, and audio processing capabilities. Perfect for developers looking to run MLX models locally while maintaining compatibility with existing OpenAI-based applications.</description>
      <author>cubist38@gmail.com</author>
      <pubDate>Sat, 16 Aug 2025 08:55:30 GMT</pubDate>
    </item>
    <item>
      <title>1.2.15</title>
      <link>https://pypi.org/project/mlx-openai-server/1.2.15/</link>
      <description>A high-performance API server that provides OpenAI-compatible endpoints for MLX models. Built with Python and FastAPI, it enables efficient, scalable, and user-friendly local deployment of MLX-based multimodal models with an OpenAI-compatible interface. Supports text, vision, and audio processing capabilities. Perfect for developers looking to run MLX models locally while maintaining compatibility with existing OpenAI-based applications.</description>
      <author>cubist38@gmail.com</author>
      <pubDate>Fri, 15 Aug 2025 15:44:21 GMT</pubDate>
    </item>
    <item>
      <title>1.2.14</title>
      <link>https://pypi.org/project/mlx-openai-server/1.2.14/</link>
      <description>A high-performance API server that provides OpenAI-compatible endpoints for MLX models. Built with Python and FastAPI, it enables efficient, scalable, and user-friendly local deployment of MLX-based multimodal models with an OpenAI-compatible interface. Supports text, vision, and audio processing capabilities. Perfect for developers looking to run MLX models locally while maintaining compatibility with existing OpenAI-based applications.</description>
      <author>cubist38@gmail.com</author>
      <pubDate>Fri, 15 Aug 2025 09:46:33 GMT</pubDate>
    </item>
    <item>
      <title>1.2.13</title>
      <link>https://pypi.org/project/mlx-openai-server/1.2.13/</link>
      <description>A high-performance API server that provides OpenAI-compatible endpoints for MLX models. Built with Python and FastAPI, it enables efficient, scalable, and user-friendly local deployment of MLX-based multimodal models with an OpenAI-compatible interface. Supports text, vision, and audio processing capabilities. Perfect for developers looking to run MLX models locally while maintaining compatibility with existing OpenAI-based applications.</description>
      <author>cubist38@gmail.com</author>
      <pubDate>Thu, 14 Aug 2025 14:55:27 GMT</pubDate>
    </item>
    <item>
      <title>1.2.11</title>
      <link>https://pypi.org/project/mlx-openai-server/1.2.11/</link>
      <description>A high-performance API server that provides OpenAI-compatible endpoints for MLX models. Built with Python and FastAPI, it enables efficient, scalable, and user-friendly local deployment of MLX-based multimodal models with an OpenAI-compatible interface. Supports text, vision, and audio processing capabilities. Perfect for developers looking to run MLX models locally while maintaining compatibility with existing OpenAI-based applications.</description>
      <author>cubist38@gmail.com</author>
      <pubDate>Thu, 14 Aug 2025 04:59:30 GMT</pubDate>
    </item>
    <item>
      <title>1.2.10</title>
      <link>https://pypi.org/project/mlx-openai-server/1.2.10/</link>
      <description>A high-performance API server that provides OpenAI-compatible endpoints for MLX models. Built with Python and FastAPI, it enables efficient, scalable, and user-friendly local deployment of MLX-based multimodal models with an OpenAI-compatible interface. Supports text, vision, and audio processing capabilities. Perfect for developers looking to run MLX models locally while maintaining compatibility with existing OpenAI-based applications.</description>
      <author>cubist38@gmail.com</author>
      <pubDate>Thu, 14 Aug 2025 03:48:02 GMT</pubDate>
    </item>
    <item>
      <title>1.2.9</title>
      <link>https://pypi.org/project/mlx-openai-server/1.2.9/</link>
      <description>A high-performance API server that provides OpenAI-compatible endpoints for MLX models. Built with Python and FastAPI, it enables efficient, scalable, and user-friendly local deployment of MLX-based multimodal models with an OpenAI-compatible interface. Supports text, vision, and audio processing capabilities. Perfect for developers looking to run MLX models locally while maintaining compatibility with existing OpenAI-based applications.</description>
      <author>cubist38@gmail.com</author>
      <pubDate>Tue, 12 Aug 2025 06:24:53 GMT</pubDate>
    </item>
    <item>
      <title>1.2.8</title>
      <link>https://pypi.org/project/mlx-openai-server/1.2.8/</link>
      <description>A high-performance API server that provides OpenAI-compatible endpoints for MLX models. Built with Python and FastAPI, it enables efficient, scalable, and user-friendly local deployment of MLX-based multimodal models with an OpenAI-compatible interface. Supports text, vision, and audio processing capabilities. Perfect for developers looking to run MLX models locally while maintaining compatibility with existing OpenAI-based applications.</description>
      <author>cubist38@gmail.com</author>
      <pubDate>Tue, 12 Aug 2025 05:01:15 GMT</pubDate>
    </item>
    <item>
      <title>1.2.7</title>
      <link>https://pypi.org/project/mlx-openai-server/1.2.7/</link>
      <description>A high-performance API server that provides OpenAI-compatible endpoints for MLX models. Built with Python and FastAPI, it enables efficient, scalable, and user-friendly local deployment of MLX-based multimodal models with an OpenAI-compatible interface. Supports text, vision, and audio processing capabilities. Perfect for developers looking to run MLX models locally while maintaining compatibility with existing OpenAI-based applications.</description>
      <author>cubist38@gmail.com</author>
      <pubDate>Tue, 12 Aug 2025 03:12:30 GMT</pubDate>
    </item>
    <item>
      <title>1.2.6</title>
      <link>https://pypi.org/project/mlx-openai-server/1.2.6/</link>
      <description>A high-performance API server that provides OpenAI-compatible endpoints for MLX models. Built with Python and FastAPI, it enables efficient, scalable, and user-friendly local deployment of MLX-based multimodal models with an OpenAI-compatible interface. Supports text, vision, and audio processing capabilities. Perfect for developers looking to run MLX models locally while maintaining compatibility with existing OpenAI-based applications.</description>
      <author>cubist38@gmail.com</author>
      <pubDate>Fri, 08 Aug 2025 05:00:51 GMT</pubDate>
    </item>
    <item>
      <title>1.2.4</title>
      <link>https://pypi.org/project/mlx-openai-server/1.2.4/</link>
      <description>A high-performance API server that provides OpenAI-compatible endpoints for MLX models. Built with Python and FastAPI, it enables efficient, scalable, and user-friendly local deployment of MLX-based multimodal models with an OpenAI-compatible interface. Supports text, vision, and audio processing capabilities. Perfect for developers looking to run MLX models locally while maintaining compatibility with existing OpenAI-based applications.</description>
      <author>cubist38@gmail.com</author>
      <pubDate>Sun, 03 Aug 2025 00:52:40 GMT</pubDate>
    </item>
  </channel>
</rss>