<?xml version="1.0" encoding="UTF-8"?>
<rss version="2.0">
  <channel>
    <title>PyPI recent updates for autojudge-evaluate</title>
    <link>https://pypi.org/project/autojudge-evaluate/</link>
    <description>Recent updates to the Python Package Index for autojudge-evaluate</description>
    <language>en</language>
    <item>
      <title>0.3.14</title>
      <link>https://pypi.org/project/autojudge-evaluate/0.3.14/</link>
      <description>Evaluation tools for TREC AutoJudge: meta-evaluate, qrel-evaluate, leaderboard statistics</description>
      <pubDate>Tue, 21 Apr 2026 03:44:35 GMT</pubDate>
    </item>
    <item>
      <title>0.3.13</title>
      <link>https://pypi.org/project/autojudge-evaluate/0.3.13/</link>
      <description>Evaluation tools for TREC AutoJudge: meta-evaluate, qrel-evaluate, leaderboard statistics</description>
      <pubDate>Fri, 17 Apr 2026 02:28:27 GMT</pubDate>
    </item>
    <item>
      <title>0.3.12</title>
      <link>https://pypi.org/project/autojudge-evaluate/0.3.12/</link>
      <description>Evaluation tools for TREC AutoJudge: meta-evaluate, qrel-evaluate, leaderboard statistics</description>
      <pubDate>Thu, 16 Apr 2026 15:54:40 GMT</pubDate>
    </item>
    <item>
      <title>0.3.11</title>
      <link>https://pypi.org/project/autojudge-evaluate/0.3.11/</link>
      <description>Evaluation tools for TREC AutoJudge: meta-evaluate, qrel-evaluate, leaderboard statistics</description>
      <pubDate>Thu, 09 Apr 2026 15:46:59 GMT</pubDate>
    </item>
    <item>
      <title>0.3.10</title>
      <link>https://pypi.org/project/autojudge-evaluate/0.3.10/</link>
      <description>Evaluation tools for TREC AutoJudge: meta-evaluate, qrel-evaluate, leaderboard statistics</description>
      <pubDate>Thu, 02 Apr 2026 17:42:41 GMT</pubDate>
    </item>
    <item>
      <title>0.3.9</title>
      <link>https://pypi.org/project/autojudge-evaluate/0.3.9/</link>
      <description>Evaluation tools for TREC AutoJudge: meta-evaluate, qrel-evaluate, leaderboard statistics</description>
      <pubDate>Thu, 02 Apr 2026 15:50:20 GMT</pubDate>
    </item>
    <item>
      <title>0.3.8</title>
      <link>https://pypi.org/project/autojudge-evaluate/0.3.8/</link>
      <description>Evaluation tools for TREC AutoJudge: meta-evaluate, qrel-evaluate, leaderboard statistics</description>
      <pubDate>Thu, 02 Apr 2026 09:37:18 GMT</pubDate>
    </item>
    <item>
      <title>0.3.7</title>
      <link>https://pypi.org/project/autojudge-evaluate/0.3.7/</link>
      <description>Evaluation tools for TREC AutoJudge: meta-evaluate, qrel-evaluate, leaderboard statistics</description>
      <pubDate>Sun, 29 Mar 2026 12:43:57 GMT</pubDate>
    </item>
    <item>
      <title>0.3.2</title>
      <link>https://pypi.org/project/autojudge-evaluate/0.3.2/</link>
      <description>Evaluation tools for TREC AutoJudge: meta-evaluate, qrel-evaluate, leaderboard statistics</description>
      <pubDate>Sun, 15 Feb 2026 18:10:44 GMT</pubDate>
    </item>
    <item>
      <title>0.2.1</title>
      <link>https://pypi.org/project/autojudge-evaluate/0.2.1/</link>
      <description>Evaluation tools for TREC AutoJudge: meta-evaluate, qrel-evaluate, leaderboard statistics</description>
      <pubDate>Sun, 15 Feb 2026 18:05:18 GMT</pubDate>
    </item>
    <item>
      <title>0.3.1</title>
      <link>https://pypi.org/project/autojudge-evaluate/0.3.1/</link>
      <description>Evaluation tools for TREC AutoJudge: meta-evaluate, qrel-evaluate, leaderboard statistics</description>
      <pubDate>Mon, 09 Feb 2026 18:01:19 GMT</pubDate>
    </item>
  </channel>
</rss>