<?xml version="1.0" encoding="UTF-8"?>
<rss version="2.0">
  <channel>
    <title>PyPI recent updates for localcoder</title>
    <link>https://pypi.org/project/localcoder/</link>
    <description>Recent updates to the Python Package Index for localcoder</description>
    <language>en</language>
    <item>
      <title>0.4.0</title>
      <link>https://pypi.org/project/localcoder/0.4.0/</link>
      <description>Local AI coding agent with image generation, MCP tools, session persistence. Runs 100% on GPU — Gemma 4 + Flux. No API keys.</description>
      <pubDate>Sun, 12 Apr 2026 15:31:31 GMT</pubDate>
    </item>
    <item>
      <title>0.3.0</title>
      <link>https://pypi.org/project/localcoder/0.3.0/</link>
      <description>Local AI coding agent — auto-installs, auto-serves, zero config. Works with Gemma 4, Qwen 3.5, and any model via llama.cpp or Ollama.</description>
      <pubDate>Wed, 08 Apr 2026 05:17:42 GMT</pubDate>
    </item>
    <item>
      <title>0.2.1</title>
      <link>https://pypi.org/project/localcoder/0.2.1/</link>
      <description>Local AI coding agent — auto-installs, auto-serves, zero config. Works with Gemma 4, Qwen 3.5, and any model via llama.cpp or Ollama.</description>
      <pubDate>Tue, 07 Apr 2026 18:53:55 GMT</pubDate>
    </item>
    <item>
      <title>0.2.0</title>
      <link>https://pypi.org/project/localcoder/0.2.0/</link>
      <description>Local AI coding agent — auto-installs, auto-serves, zero config. Works with Gemma 4, Qwen 3.5, and any model via llama.cpp or Ollama.</description>
      <pubDate>Tue, 07 Apr 2026 18:47:44 GMT</pubDate>
    </item>
    <item>
      <title>0.1.1</title>
      <link>https://pypi.org/project/localcoder/0.1.1/</link>
      <description>Local AI coding agent — auto-installs, auto-serves, zero config. Works with Gemma 4, Qwen 3.5, and any model via llama.cpp or Ollama.</description>
      <pubDate>Tue, 07 Apr 2026 17:00:36 GMT</pubDate>
    </item>
    <item>
      <title>0.1.0</title>
      <link>https://pypi.org/project/localcoder/0.1.0/</link>
      <description>Local AI coding agent — auto-installs, auto-serves, zero config. Works with Gemma 4, Qwen 3.5, and any model via llama.cpp or Ollama.</description>
      <pubDate>Mon, 06 Apr 2026 21:23:19 GMT</pubDate>
    </item>
  </channel>
</rss>