<?xml version="1.0" encoding="UTF-8"?>
<rss version="2.0">
  <channel>
    <title>PyPI recent updates for isat-tuner</title>
    <link>https://pypi.org/project/isat-tuner/</link>
    <description>Recent updates to the Python Package Index for isat-tuner</description>
    <language>en</language>
    <item>
      <title>0.8.5</title>
      <link>https://pypi.org/project/isat-tuner/0.8.5/</link>
      <description>ISAT: Inference Stack Auto-Tuner — CLI toolkit to auto-tune, profile, prune, deploy, and monitor ONNX models on any GPU.</description>
      <author>sudheerdevu4work@gmail.com</author>
      <pubDate>Mon, 27 Apr 2026 10:13:06 GMT</pubDate>
    </item>
    <item>
      <title>0.8.4</title>
      <link>https://pypi.org/project/isat-tuner/0.8.4/</link>
      <description>ISAT: Inference Stack Auto-Tuner — CLI toolkit to auto-tune, profile, prune, deploy, and monitor ONNX models on any GPU.</description>
      <author>sudheerdevu4work@gmail.com</author>
      <pubDate>Mon, 27 Apr 2026 10:02:19 GMT</pubDate>
    </item>
    <item>
      <title>0.8.3</title>
      <link>https://pypi.org/project/isat-tuner/0.8.3/</link>
      <description>ISAT: Inference Stack Auto-Tuner — CLI toolkit to auto-tune, profile, prune, deploy, and monitor ONNX models on any GPU.</description>
      <author>sudheerdevu4work@gmail.com</author>
      <pubDate>Mon, 27 Apr 2026 09:40:47 GMT</pubDate>
    </item>
    <item>
      <title>0.8.2</title>
      <link>https://pypi.org/project/isat-tuner/0.8.2/</link>
      <description>ISAT: Inference Stack Auto-Tuner — CLI toolkit to auto-tune, profile, prune, deploy, and monitor ONNX models on any GPU.</description>
      <author>sudheerdevu4work@gmail.com</author>
      <pubDate>Mon, 27 Apr 2026 09:25:28 GMT</pubDate>
    </item>
    <item>
      <title>0.8.1</title>
      <link>https://pypi.org/project/isat-tuner/0.8.1/</link>
      <description>ISAT: Inference Stack Auto-Tuner — CLI toolkit to auto-tune, profile, prune, deploy, and monitor ONNX models on any GPU.</description>
      <author>sudheerdevu4work@gmail.com</author>
      <pubDate>Mon, 27 Apr 2026 04:43:19 GMT</pubDate>
    </item>
    <item>
      <title>0.8.0</title>
      <link>https://pypi.org/project/isat-tuner/0.8.0/</link>
      <description>ISAT: Inference Stack Auto-Tuner — CLI toolkit to auto-tune, profile, prune, deploy, and monitor ONNX models on any GPU.</description>
      <author>sudheerdevu4work@gmail.com</author>
      <pubDate>Mon, 27 Apr 2026 04:27:33 GMT</pubDate>
    </item>
    <item>
      <title>0.7.8</title>
      <link>https://pypi.org/project/isat-tuner/0.7.8/</link>
      <description>ISAT: Inference Stack Auto-Tuner — CLI toolkit to auto-tune, profile, prune, deploy, and monitor ONNX models on any GPU.</description>
      <author>sudheerdevu4work@gmail.com</author>
      <pubDate>Fri, 24 Apr 2026 14:41:19 GMT</pubDate>
    </item>
    <item>
      <title>0.7.7</title>
      <link>https://pypi.org/project/isat-tuner/0.7.7/</link>
      <description>ISAT: Inference Stack Auto-Tuner — 55-command CLI to auto-tune, profile, prune, deploy, and monitor ONNX models on any GPU. By Sudheer Ibrahim Daniel Devu.</description>
      <author>sudheerdevu4work@gmail.com</author>
      <pubDate>Fri, 24 Apr 2026 12:20:38 GMT</pubDate>
    </item>
    <item>
      <title>0.7.6</title>
      <link>https://pypi.org/project/isat-tuner/0.7.6/</link>
      <description>ISAT: Inference Stack Auto-Tuner — 55-command CLI to auto-tune, profile, prune, deploy, and monitor ONNX models on any GPU. By Sudheer Ibrahim Daniel Devu.</description>
      <author>sudheerdevu4work@gmail.com</author>
      <pubDate>Fri, 24 Apr 2026 10:17:22 GMT</pubDate>
    </item>
    <item>
      <title>0.7.5</title>
      <link>https://pypi.org/project/isat-tuner/0.7.5/</link>
      <description>ISAT: Inference Stack Auto-Tuner — 55-command CLI to auto-tune, profile, prune, deploy, and monitor ONNX models on any GPU. By Sudheer Ibrahim Daniel Devu.</description>
      <author>sudheerdevu4work@gmail.com</author>
      <pubDate>Fri, 24 Apr 2026 09:38:27 GMT</pubDate>
    </item>
    <item>
      <title>0.7.4</title>
      <link>https://pypi.org/project/isat-tuner/0.7.4/</link>
      <description>ISAT: Inference Stack Auto-Tuner — 55-command CLI to auto-tune, profile, prune, deploy, and monitor ONNX models on any GPU. By Sudheer Ibrahim Daniel Devu.</description>
      <author>sudheerdevu4work@gmail.com</author>
      <pubDate>Fri, 24 Apr 2026 09:21:04 GMT</pubDate>
    </item>
    <item>
      <title>0.7.3</title>
      <link>https://pypi.org/project/isat-tuner/0.7.3/</link>
      <description>ISAT: Inference Stack Auto-Tuner — 55-command CLI to auto-tune, profile, prune, deploy, and monitor ONNX models on any GPU. By Sudheer Ibrahim Daniel Devu.</description>
      <author>sudheerdevu4work@gmail.com</author>
      <pubDate>Fri, 24 Apr 2026 09:16:54 GMT</pubDate>
    </item>
    <item>
      <title>0.7.2</title>
      <link>https://pypi.org/project/isat-tuner/0.7.2/</link>
      <description>ISAT: Inference Stack Auto-Tuner — 55-command CLI to auto-tune, profile, prune, deploy, and monitor ONNX models on any GPU. By Sudheer Ibrahim Daniel Devu.</description>
      <author>sudheerdevu4work@gmail.com</author>
      <pubDate>Fri, 24 Apr 2026 09:10:57 GMT</pubDate>
    </item>
    <item>
      <title>0.7.1</title>
      <link>https://pypi.org/project/isat-tuner/0.7.1/</link>
      <description>Inference Stack Auto-Tuner -- automatically find the fastest inference configuration for any ONNX model on any GPU</description>
      <author>sudheerdevu4work@gmail.com</author>
      <pubDate>Fri, 24 Apr 2026 08:54:11 GMT</pubDate>
    </item>
    <item>
      <title>0.7.0</title>
      <link>https://pypi.org/project/isat-tuner/0.7.0/</link>
      <description>Inference Stack Auto-Tuner -- automatically find the fastest inference configuration for any ONNX model on any GPU</description>
      <author>sudheerdevu4work@gmail.com</author>
      <pubDate>Fri, 24 Apr 2026 08:45:24 GMT</pubDate>
    </item>
    <item>
      <title>0.6.0</title>
      <link>https://pypi.org/project/isat-tuner/0.6.0/</link>
      <description>Inference Stack Auto-Tuner -- automatically find the fastest inference configuration for any ONNX model on any GPU</description>
      <author>sudheerdevu4work@gmail.com</author>
      <pubDate>Fri, 24 Apr 2026 08:26:27 GMT</pubDate>
    </item>
    <item>
      <title>0.5.0</title>
      <link>https://pypi.org/project/isat-tuner/0.5.0/</link>
      <description>Inference Stack Auto-Tuner -- automatically find the fastest inference configuration for any ONNX model on any GPU</description>
      <author>sudheerdevu4work@gmail.com</author>
      <pubDate>Fri, 24 Apr 2026 08:02:04 GMT</pubDate>
    </item>
    <item>
      <title>0.4.0</title>
      <link>https://pypi.org/project/isat-tuner/0.4.0/</link>
      <description>Inference Stack Auto-Tuner -- automatically find the fastest inference configuration for any ONNX model on any GPU</description>
      <author>sudheerdevu4work@gmail.com</author>
      <pubDate>Fri, 24 Apr 2026 07:40:59 GMT</pubDate>
    </item>
    <item>
      <title>0.3.0</title>
      <link>https://pypi.org/project/isat-tuner/0.3.0/</link>
      <description>Inference Stack Auto-Tuner -- automatically find the fastest inference configuration for any ONNX model on any GPU</description>
      <author>sudheerdevu4work@gmail.com</author>
      <pubDate>Fri, 24 Apr 2026 07:23:54 GMT</pubDate>
    </item>
    <item>
      <title>0.2.0</title>
      <link>https://pypi.org/project/isat-tuner/0.2.0/</link>
      <description>Inference Stack Auto-Tuner -- automatically find the fastest inference configuration for any ONNX model on any GPU</description>
      <author>sudheerdevu4work@gmail.com</author>
      <pubDate>Fri, 24 Apr 2026 07:13:06 GMT</pubDate>
    </item>
    <item>
      <title>0.1.1</title>
      <link>https://pypi.org/project/isat-tuner/0.1.1/</link>
      <description>Inference Stack Auto-Tuner -- automatically find the fastest inference configuration for any ONNX model on any GPU</description>
      <author>sudheerdevu4work@gmail.com</author>
      <pubDate>Fri, 24 Apr 2026 07:00:56 GMT</pubDate>
    </item>
    <item>
      <title>0.1.0</title>
      <link>https://pypi.org/project/isat-tuner/0.1.0/</link>
      <description>Inference Stack Auto-Tuner -- automatically find the fastest inference configuration for any ONNX model on any GPU</description>
      <author>sudhdevu@amd.com</author>
      <pubDate>Fri, 24 Apr 2026 06:57:25 GMT</pubDate>
    </item>
  </channel>
</rss>