<?xml version="1.0" encoding="UTF-8"?>
<rss version="2.0">
  <channel>
    <title>PyPI recent updates for transformer-attention-hooker</title>
    <link>https://pypi.org/project/transformer-attention-hooker/</link>
    <description>Recent updates to the Python Package Index for transformer-attention-hooker</description>
    <language>en</language>
    <item>
      <title>0.1.1a1</title>
      <link>https://pypi.org/project/transformer-attention-hooker/0.1.1a1/</link>
      <description>A lightweight, robust utility for extracting and visualizing attention weights from PyTorch Transformer models.</description>
      <author>donghwee.yoon@gmail.com</author>
      <pubDate>Mon, 01 Dec 2025 02:20:34 GMT</pubDate>
    </item>
    <item>
      <title>0.1.0a1</title>
      <link>https://pypi.org/project/transformer-attention-hooker/0.1.0a1/</link>
      <description>A lightweight, robust utility for extracting and visualizing attention weights from PyTorch Transformer models.</description>
      <author>donghwee.yoon@gmail.com</author>
      <pubDate>Fri, 28 Nov 2025 05:31:41 GMT</pubDate>
    </item>
  </channel>
</rss>