<?xml version="1.0" encoding="UTF-8"?>
<rss version="2.0">
  <channel>
    <title>PyPI recent updates for ts-tokenizer</title>
    <link>https://pypi.org/project/ts-tokenizer/</link>
    <description>Recent updates to the Python Package Index for ts-tokenizer</description>
    <language>en</language>    <item>
      <title>0.1.22</title>
      <link>https://pypi.org/project/ts-tokenizer/0.1.22/</link>
      <description>TS Tokenizer is a hybrid (lexicon-based and rule-based) tokenizer designed specifically for tokenizing Turkish texts.</description>
<author>tanersezerr@gmail.com</author>      <pubDate>Thu, 27 Nov 2025 12:02:20 GMT</pubDate>
    </item>    <item>
      <title>0.1.21</title>
      <link>https://pypi.org/project/ts-tokenizer/0.1.21/</link>
      <description>TS Tokenizer is a hybrid (lexicon-based and rule-based) tokenizer designed specifically for tokenizing Turkish texts.</description>
<author>tanersezerr@gmail.com</author>      <pubDate>Thu, 27 Nov 2025 11:43:39 GMT</pubDate>
    </item>    <item>
      <title>0.1.20</title>
      <link>https://pypi.org/project/ts-tokenizer/0.1.20/</link>
      <description>TS Tokenizer is a hybrid (lexicon-based and rule-based) tokenizer designed specifically for tokenizing Turkish texts.</description>
<author>tanersezerr@gmail.com</author>      <pubDate>Mon, 28 Apr 2025 22:31:10 GMT</pubDate>
    </item>    <item>
      <title>0.1.19</title>
      <link>https://pypi.org/project/ts-tokenizer/0.1.19/</link>
      <description>TS Tokenizer is a hybrid (lexicon-based and rule-based) tokenizer designed specifically for tokenizing Turkish texts.</description>
<author>tanersezerr@gmail.com</author>      <pubDate>Thu, 30 Jan 2025 19:59:37 GMT</pubDate>
    </item>    <item>
      <title>0.1.18</title>
      <link>https://pypi.org/project/ts-tokenizer/0.1.18/</link>
      <description>TS Tokenizer is a hybrid (lexicon-based and rule-based) tokenizer designed specifically for tokenizing Turkish texts.</description>
<author>tanersezerr@gmail.com</author>      <pubDate>Wed, 25 Dec 2024 09:25:32 GMT</pubDate>
    </item>    <item>
      <title>0.1.17</title>
      <link>https://pypi.org/project/ts-tokenizer/0.1.17/</link>
      <description>TS Tokenizer is a hybrid (lexicon-based and rule-based) tokenizer designed specifically for tokenizing Turkish texts.</description>
<author>tanersezerr@gmail.com</author>      <pubDate>Tue, 03 Dec 2024 09:31:25 GMT</pubDate>
    </item>    <item>
      <title>0.1.16</title>
      <link>https://pypi.org/project/ts-tokenizer/0.1.16/</link>
      <description>TS Tokenizer is a hybrid (lexicon-based and rule-based) tokenizer designed specifically for tokenizing Turkish texts.</description>
<author>tanersezerr@gmail.com</author>      <pubDate>Tue, 03 Dec 2024 08:16:33 GMT</pubDate>
    </item>    <item>
      <title>0.1.15</title>
      <link>https://pypi.org/project/ts-tokenizer/0.1.15/</link>
      <description>TS Tokenizer is a hybrid (lexicon-based and rule-based) tokenizer designed specifically for tokenizing Turkish texts.</description>
<author>tanersezerr@gmail.com</author>      <pubDate>Mon, 02 Dec 2024 13:25:47 GMT</pubDate>
    </item>    <item>
      <title>0.1.14</title>
      <link>https://pypi.org/project/ts-tokenizer/0.1.14/</link>
      <description>TS Tokenizer is a hybrid (lexicon-based and rule-based) tokenizer designed specifically for tokenizing Turkish texts.</description>
<author>tanersezerr@gmail.com</author>      <pubDate>Fri, 29 Nov 2024 13:41:53 GMT</pubDate>
    </item>    <item>
      <title>0.1.13</title>
      <link>https://pypi.org/project/ts-tokenizer/0.1.13/</link>
      <description>TS Tokenizer is a hybrid (lexicon-based and rule-based) tokenizer designed specifically for tokenizing Turkish texts.</description>
<author>tanersezerr@gmail.com</author>      <pubDate>Thu, 28 Nov 2024 16:20:58 GMT</pubDate>
    </item>    <item>
      <title>0.1.12</title>
      <link>https://pypi.org/project/ts-tokenizer/0.1.12/</link>
      <description>TS Tokenizer is a hybrid (lexicon-based and rule-based) tokenizer designed for Turkish text.</description>
<author>tanersezerr@gmail.com</author>      <pubDate>Fri, 18 Oct 2024 06:47:56 GMT</pubDate>
    </item>    <item>
      <title>0.1.11</title>
      <link>https://pypi.org/project/ts-tokenizer/0.1.11/</link>
      <description>TS Tokenizer is a hybrid (lexicon-based and rule-based) tokenizer designed for Turkish text.</description>
<author>tanersezerr@gmail.com</author>      <pubDate>Thu, 17 Oct 2024 14:52:25 GMT</pubDate>
    </item>    <item>
      <title>0.1.10</title>
      <link>https://pypi.org/project/ts-tokenizer/0.1.10/</link>
      <description>TS Tokenizer is a hybrid (lexicon-based and rule-based) tokenizer designed for Turkish text.</description>
<author>tanersezerr@gmail.com</author>      <pubDate>Thu, 17 Oct 2024 14:03:21 GMT</pubDate>
    </item>    <item>
      <title>0.1.9</title>
      <link>https://pypi.org/project/ts-tokenizer/0.1.9/</link>
      <description>TS Tokenizer is a rule-based tokenizer specifically designed for processing Turkish text. It provides functionalities to split text into tokens following the grammatical and linguistic rules of the Turkish language.</description>
<author>tanersezerr@gmail.com</author>      <pubDate>Thu, 17 Oct 2024 13:38:53 GMT</pubDate>
    </item>    <item>
      <title>0.1.8</title>
      <link>https://pypi.org/project/ts-tokenizer/0.1.8/</link>
      <description>TS Tokenizer is a rule-based tokenizer specifically designed for processing Turkish text. It provides functionalities to split text into tokens following the grammatical and linguistic rules of the Turkish language.</description>
<author>tanersezerr@gmail.com</author>      <pubDate>Thu, 17 Oct 2024 13:34:36 GMT</pubDate>
    </item>    <item>
      <title>0.1.7</title>
      <link>https://pypi.org/project/ts-tokenizer/0.1.7/</link>
      <description>TS Tokenizer is a rule-based tokenizer specifically designed for processing Turkish text. It provides functionalities to split text into tokens following the grammatical and linguistic rules of the Turkish language.</description>
<author>tanersezerr@gmail.com</author>      <pubDate>Thu, 17 Oct 2024 12:54:36 GMT</pubDate>
    </item>    <item>
      <title>0.1.6</title>
      <link>https://pypi.org/project/ts-tokenizer/0.1.6/</link>
      <description>TS Tokenizer is a rule-based tokenizer specifically designed for processing Turkish text. It provides functionalities to split text into tokens following the grammatical and linguistic rules of the Turkish language.</description>
<author>tanersezerr@gmail.com</author>      <pubDate>Thu, 17 Oct 2024 12:45:35 GMT</pubDate>
    </item>    <item>
      <title>0.1.5</title>
      <link>https://pypi.org/project/ts-tokenizer/0.1.5/</link>
      <description>TS Tokenizer is a rule-based tokenizer specifically designed for processing Turkish text. It provides functionalities to split text into tokens following the grammatical and linguistic rules of the Turkish language.</description>
<author>tanersezerr@gmail.com</author>      <pubDate>Thu, 17 Oct 2024 12:35:13 GMT</pubDate>
    </item>    <item>
      <title>0.1.4</title>
      <link>https://pypi.org/project/ts-tokenizer/0.1.4/</link>
      <description>TS Tokenizer is a rule-based tokenizer specifically designed for processing Turkish text. It provides functionalities to split text into tokens following the grammatical and linguistic rules of the Turkish language.</description>
<author>tanersezerr@gmail.com</author>      <pubDate>Thu, 17 Oct 2024 10:25:43 GMT</pubDate>
    </item>    <item>
      <title>0.1.3</title>
      <link>https://pypi.org/project/ts-tokenizer/0.1.3/</link>
      <description>TS Tokenizer is a rule-based tokenizer specifically designed for processing Turkish text. It provides functionalities to split text into tokens following the grammatical and linguistic rules of the Turkish language.</description>
<author>tanersezerr@gmail.com</author>      <pubDate>Wed, 04 Sep 2024 11:18:00 GMT</pubDate>
    </item>    <item>
      <title>0.1.2</title>
      <link>https://pypi.org/project/ts-tokenizer/0.1.2/</link>
      <description>TS Tokenizer is a rule-based tokenizer specifically designed for processing Turkish text. It provides functionalities to split text into tokens following the grammatical and linguistic rules of the Turkish language.</description>
<author>tanersezerr@gmail.com</author>      <pubDate>Sat, 31 Aug 2024 23:44:42 GMT</pubDate>
    </item>    <item>
      <title>0.1.1</title>
      <link>https://pypi.org/project/ts-tokenizer/0.1.1/</link>
      <description>TS Tokenizer is a Turkish Tokenizer.</description>
<author>tanersezerr@gmail.com</author>      <pubDate>Sat, 31 Aug 2024 22:33:30 GMT</pubDate>
    </item>    <item>
      <title>0.1.0</title>
      <link>https://pypi.org/project/ts-tokenizer/0.1.0/</link>
      <description>An old fashioned rule-based tokenizer for Turkish</description>
<author>tanersezerr@gmail.com</author>      <pubDate>Tue, 21 May 2024 23:37:43 GMT</pubDate>
    </item>  </channel>
</rss>