<?xml version="1.0" encoding="UTF-8"?>
<rss version="2.0">
  <channel>
    <title>PyPI recent updates for mps-bitsandbytes</title>
    <link>https://pypi.org/project/mps-bitsandbytes/</link>
    <description>Recent updates to the Python Package Index for mps-bitsandbytes</description>
    <language>en</language>
    <item>
      <title>0.7.0</title>
      <link>https://pypi.org/project/mps-bitsandbytes/0.7.0/</link>
      <description>NF4/FP4/FP8/INT8 quantization for PyTorch on Apple Silicon with Metal GPU acceleration</description>
      <pubDate>Sun, 08 Feb 2026 17:04:08 GMT</pubDate>
    </item>
    <item>
      <title>0.6.1</title>
      <link>https://pypi.org/project/mps-bitsandbytes/0.6.1/</link>
      <description>NF4/FP4/FP8/INT8 quantization for PyTorch on Apple Silicon with Metal GPU acceleration</description>
      <pubDate>Mon, 02 Feb 2026 23:18:47 GMT</pubDate>
    </item>
    <item>
      <title>0.6.0</title>
      <link>https://pypi.org/project/mps-bitsandbytes/0.6.0/</link>
      <description>NF4/FP4/FP8/INT8 quantization for PyTorch on Apple Silicon with Metal GPU acceleration</description>
      <pubDate>Mon, 02 Feb 2026 22:14:39 GMT</pubDate>
    </item>
    <item>
      <title>0.5.1</title>
      <link>https://pypi.org/project/mps-bitsandbytes/0.5.1/</link>
      <description>NF4/FP4/FP8/INT8 quantization for PyTorch on Apple Silicon with Metal GPU acceleration</description>
      <pubDate>Mon, 02 Feb 2026 09:47:33 GMT</pubDate>
    </item>
    <item>
      <title>0.5.0</title>
      <link>https://pypi.org/project/mps-bitsandbytes/0.5.0/</link>
      <description>NF4/FP4/FP8/INT8 quantization for PyTorch on Apple Silicon with Metal GPU acceleration</description>
      <pubDate>Sun, 01 Feb 2026 15:28:42 GMT</pubDate>
    </item>
    <item>
      <title>0.4.9</title>
      <link>https://pypi.org/project/mps-bitsandbytes/0.4.9/</link>
      <description>NF4/FP4/FP8/INT8 quantization for PyTorch on Apple Silicon with Metal GPU acceleration</description>
      <pubDate>Sun, 01 Feb 2026 14:54:07 GMT</pubDate>
    </item>
    <item>
      <title>0.4.8</title>
      <link>https://pypi.org/project/mps-bitsandbytes/0.4.8/</link>
      <description>NF4/FP4/FP8/INT8 quantization for PyTorch on Apple Silicon with Metal GPU acceleration</description>
      <pubDate>Sat, 31 Jan 2026 14:45:33 GMT</pubDate>
    </item>
    <item>
      <title>0.4.7</title>
      <link>https://pypi.org/project/mps-bitsandbytes/0.4.7/</link>
      <description>NF4/FP4/FP8/INT8 quantization for PyTorch on Apple Silicon with Metal GPU acceleration</description>
      <pubDate>Sat, 31 Jan 2026 14:03:38 GMT</pubDate>
    </item>
    <item>
      <title>0.4.6</title>
      <link>https://pypi.org/project/mps-bitsandbytes/0.4.6/</link>
      <description>NF4/FP4/FP8/INT8 quantization for PyTorch on Apple Silicon with Metal GPU acceleration</description>
      <pubDate>Sat, 31 Jan 2026 12:16:55 GMT</pubDate>
    </item>
    <item>
      <title>0.4.5</title>
      <link>https://pypi.org/project/mps-bitsandbytes/0.4.5/</link>
      <description>NF4/FP4/FP8/INT8 quantization for PyTorch on Apple Silicon with Metal GPU acceleration</description>
      <pubDate>Sat, 31 Jan 2026 11:55:31 GMT</pubDate>
    </item>
    <item>
      <title>0.4.4</title>
      <link>https://pypi.org/project/mps-bitsandbytes/0.4.4/</link>
      <description>NF4/FP4/FP8/INT8 quantization for PyTorch on Apple Silicon with Metal GPU acceleration</description>
      <pubDate>Sat, 31 Jan 2026 11:40:43 GMT</pubDate>
    </item>
    <item>
      <title>0.4.3</title>
      <link>https://pypi.org/project/mps-bitsandbytes/0.4.3/</link>
      <description>NF4/FP4/FP8/INT8 quantization for PyTorch on Apple Silicon with Metal GPU acceleration</description>
      <pubDate>Sat, 31 Jan 2026 00:44:44 GMT</pubDate>
    </item>
    <item>
      <title>0.4.2</title>
      <link>https://pypi.org/project/mps-bitsandbytes/0.4.2/</link>
      <description>NF4/FP4/FP8/INT8 quantization for PyTorch on Apple Silicon with Metal GPU acceleration</description>
      <pubDate>Sat, 31 Jan 2026 00:33:40 GMT</pubDate>
    </item>
    <item>
      <title>0.4.1</title>
      <link>https://pypi.org/project/mps-bitsandbytes/0.4.1/</link>
      <description>NF4/FP4/FP8/INT8 quantization for PyTorch on Apple Silicon with Metal GPU acceleration</description>
      <pubDate>Fri, 30 Jan 2026 23:57:18 GMT</pubDate>
    </item>
    <item>
      <title>0.4.0</title>
      <link>https://pypi.org/project/mps-bitsandbytes/0.4.0/</link>
      <description>NF4/FP4/FP8/INT8 quantization for PyTorch on Apple Silicon with Metal GPU acceleration</description>
      <pubDate>Fri, 30 Jan 2026 17:32:36 GMT</pubDate>
    </item>
    <item>
      <title>0.3.0</title>
      <link>https://pypi.org/project/mps-bitsandbytes/0.3.0/</link>
      <description>NF4/FP4/FP8/INT8 quantization for PyTorch on Apple Silicon with Metal GPU acceleration</description>
      <pubDate>Fri, 30 Jan 2026 17:02:07 GMT</pubDate>
    </item>
    <item>
      <title>0.2.0</title>
      <link>https://pypi.org/project/mps-bitsandbytes/0.2.0/</link>
      <description>NF4/FP4/FP8/INT8 quantization for PyTorch on Apple Silicon with Metal GPU acceleration</description>
      <pubDate>Fri, 30 Jan 2026 16:04:02 GMT</pubDate>
    </item>
    <item>
      <title>0.1.2</title>
      <link>https://pypi.org/project/mps-bitsandbytes/0.1.2/</link>
      <description>8-bit and 4-bit quantization for PyTorch on Apple Silicon (M1/M2/M3/M4)</description>
      <pubDate>Fri, 30 Jan 2026 09:31:50 GMT</pubDate>
    </item>
    <item>
      <title>0.1.1</title>
      <link>https://pypi.org/project/mps-bitsandbytes/0.1.1/</link>
      <description>8-bit and 4-bit quantization for PyTorch on Apple Silicon (M1/M2/M3/M4)</description>
      <pubDate>Thu, 29 Jan 2026 13:28:34 GMT</pubDate>
    </item>
    <item>
      <title>0.1.0</title>
      <link>https://pypi.org/project/mps-bitsandbytes/0.1.0/</link>
      <description>8-bit and 4-bit quantization for PyTorch on Apple Silicon (M1/M2/M3/M4)</description>
      <pubDate>Thu, 29 Jan 2026 12:50:35 GMT</pubDate>
    </item>
  </channel>
</rss>