<?xml version="1.0" encoding="UTF-8"?>
<rss version="2.0">
  <channel>
    <title>PyPI recent updates for ai-qa-sdk</title>
    <link>https://pypi.org/project/ai-qa-sdk/</link>
    <description>Recent updates to the Python Package Index for ai-qa-sdk</description>
    <language>en</language>
    <item>
      <title>0.2.0</title>
      <link>https://pypi.org/project/ai-qa-sdk/0.2.0/</link>
      <description>Universal AI validation SDK — hallucination, factuality, safety checks for any LLM output</description>
      <author>jdnaidu15@gmail.com</author>
      <pubDate>Thu, 09 Apr 2026 19:18:59 GMT</pubDate>
    </item>
    <item>
      <title>0.1.4</title>
      <link>https://pypi.org/project/ai-qa-sdk/0.1.4/</link>
      <description>Universal AI validation SDK — hallucination, factuality, safety checks for any LLM output</description>
      <author>jdnaidu15@gmail.com</author>
      <pubDate>Thu, 09 Apr 2026 19:14:51 GMT</pubDate>
    </item>
    <item>
      <title>0.1.3</title>
      <link>https://pypi.org/project/ai-qa-sdk/0.1.3/</link>
      <description>Universal AI validation SDK — hallucination, factuality, safety checks for any LLM output</description>
      <author>jdnaidu15@gmail.com</author>
      <pubDate>Thu, 09 Apr 2026 19:12:35 GMT</pubDate>
    </item>
    <item>
      <title>0.1.2</title>
      <link>https://pypi.org/project/ai-qa-sdk/0.1.2/</link>
      <description>Universal AI validation SDK — hallucination, factuality, safety checks for any LLM output</description>
      <author>jdnaidu15@gmail.com</author>
      <pubDate>Thu, 09 Apr 2026 19:08:15 GMT</pubDate>
    </item>
    <item>
      <title>0.1.1</title>
      <link>https://pypi.org/project/ai-qa-sdk/0.1.1/</link>
      <description>Universal AI validation SDK — hallucination, factuality, safety checks for any LLM output</description>
      <author>jdnaidu15@gmail.com</author>
      <pubDate>Thu, 09 Apr 2026 19:06:00 GMT</pubDate>
    </item>
    <item>
      <title>0.1.0</title>
      <link>https://pypi.org/project/ai-qa-sdk/0.1.0/</link>
      <description>Universal AI validation SDK — hallucination, factuality, safety checks for any LLM output</description>
      <author>support@deepintent.ai</author>
      <pubDate>Thu, 09 Apr 2026 18:54:06 GMT</pubDate>
    </item>
  </channel>
</rss>