blob: 81639c96daaf371220ad22642ce4540a9adf806d [file] [log] [blame]
<!DOCTYPE html><html lang="en"><head><meta charset="utf-8"><meta name="viewport" content="width=device-width, initial-scale=1.0"><meta name="generator" content="rustdoc"><meta name="description" content="Source of the Rust file `/root/.cargo/git/checkouts/tantivy-65d0bbbddbbd5d02/433372d/src/tokenizer/ngram_tokenizer.rs`."><meta name="keywords" content="rust, rustlang, rust-lang"><title>ngram_tokenizer.rs - source</title><link rel="preload" as="font" type="font/woff2" crossorigin href="../../../SourceSerif4-Regular.ttf.woff2"><link rel="preload" as="font" type="font/woff2" crossorigin href="../../../FiraSans-Regular.woff2"><link rel="preload" as="font" type="font/woff2" crossorigin href="../../../FiraSans-Medium.woff2"><link rel="preload" as="font" type="font/woff2" crossorigin href="../../../SourceCodePro-Regular.ttf.woff2"><link rel="preload" as="font" type="font/woff2" crossorigin href="../../../SourceSerif4-Bold.ttf.woff2"><link rel="preload" as="font" type="font/woff2" crossorigin href="../../../SourceCodePro-Semibold.ttf.woff2"><link rel="stylesheet" href="../../../normalize.css"><link rel="stylesheet" href="../../../rustdoc.css" id="mainThemeStyle"><link rel="stylesheet" href="../../../ayu.css" disabled><link rel="stylesheet" href="../../../dark.css" disabled><link rel="stylesheet" href="../../../light.css" id="themeStyle"><script id="default-settings" ></script><script src="../../../storage.js"></script><script defer src="../../../source-script.js"></script><script defer src="../../../source-files.js"></script><script defer src="../../../main.js"></script><noscript><link rel="stylesheet" href="../../../noscript.css"></noscript><link rel="alternate icon" type="image/png" href="../../../favicon-16x16.png"><link rel="alternate icon" type="image/png" href="../../../favicon-32x32.png"><link rel="icon" type="image/svg+xml" href="../../../favicon.svg"></head><body class="rustdoc source"><!--[if lte IE 11]><div class="warning">This old browser is unsupported and 
will most likely display funky things.</div><![endif]--><nav class="sidebar"><a class="sidebar-logo" href="../../../tantivy/index.html"><div class="logo-container">
<img src="http://fulmicoton.com/tantivy-logo/tantivy-logo.png" alt="logo"></div></a></nav><main><div class="width-limiter"><nav class="sub"><a class="sub-logo-container" href="../../../tantivy/index.html">
<img src="http://fulmicoton.com/tantivy-logo/tantivy-logo.png" alt="logo"></a><form class="search-form"><div class="search-container"><span></span><input class="search-input" name="search" autocomplete="off" spellcheck="false" placeholder="Click or press ‘S’ to search, ‘?’ for more options…" type="search"><div id="help-button" title="help" tabindex="-1"><a href="../../../help.html">?</a></div><div id="settings-menu" tabindex="-1"><a href="../../../settings.html" title="settings"><img width="22" height="22" alt="Change settings" src="../../../wheel.svg"></a></div></div></form></nav><section id="main-content" class="content"><div class="example-wrap"><pre class="src-line-numbers"><span id="1">1</span>
<span id="2">2</span>
<span id="3">3</span>
<span id="4">4</span>
<span id="5">5</span>
<span id="6">6</span>
<span id="7">7</span>
<span id="8">8</span>
<span id="9">9</span>
<span id="10">10</span>
<span id="11">11</span>
<span id="12">12</span>
<span id="13">13</span>
<span id="14">14</span>
<span id="15">15</span>
<span id="16">16</span>
<span id="17">17</span>
<span id="18">18</span>
<span id="19">19</span>
<span id="20">20</span>
<span id="21">21</span>
<span id="22">22</span>
<span id="23">23</span>
<span id="24">24</span>
<span id="25">25</span>
<span id="26">26</span>
<span id="27">27</span>
<span id="28">28</span>
<span id="29">29</span>
<span id="30">30</span>
<span id="31">31</span>
<span id="32">32</span>
<span id="33">33</span>
<span id="34">34</span>
<span id="35">35</span>
<span id="36">36</span>
<span id="37">37</span>
<span id="38">38</span>
<span id="39">39</span>
<span id="40">40</span>
<span id="41">41</span>
<span id="42">42</span>
<span id="43">43</span>
<span id="44">44</span>
<span id="45">45</span>
<span id="46">46</span>
<span id="47">47</span>
<span id="48">48</span>
<span id="49">49</span>
<span id="50">50</span>
<span id="51">51</span>
<span id="52">52</span>
<span id="53">53</span>
<span id="54">54</span>
<span id="55">55</span>
<span id="56">56</span>
<span id="57">57</span>
<span id="58">58</span>
<span id="59">59</span>
<span id="60">60</span>
<span id="61">61</span>
<span id="62">62</span>
<span id="63">63</span>
<span id="64">64</span>
<span id="65">65</span>
<span id="66">66</span>
<span id="67">67</span>
<span id="68">68</span>
<span id="69">69</span>
<span id="70">70</span>
<span id="71">71</span>
<span id="72">72</span>
<span id="73">73</span>
<span id="74">74</span>
<span id="75">75</span>
<span id="76">76</span>
<span id="77">77</span>
<span id="78">78</span>
<span id="79">79</span>
<span id="80">80</span>
<span id="81">81</span>
<span id="82">82</span>
<span id="83">83</span>
<span id="84">84</span>
<span id="85">85</span>
<span id="86">86</span>
<span id="87">87</span>
<span id="88">88</span>
<span id="89">89</span>
<span id="90">90</span>
<span id="91">91</span>
<span id="92">92</span>
<span id="93">93</span>
<span id="94">94</span>
<span id="95">95</span>
<span id="96">96</span>
<span id="97">97</span>
<span id="98">98</span>
<span id="99">99</span>
<span id="100">100</span>
<span id="101">101</span>
<span id="102">102</span>
<span id="103">103</span>
<span id="104">104</span>
<span id="105">105</span>
<span id="106">106</span>
<span id="107">107</span>
<span id="108">108</span>
<span id="109">109</span>
<span id="110">110</span>
<span id="111">111</span>
<span id="112">112</span>
<span id="113">113</span>
<span id="114">114</span>
<span id="115">115</span>
<span id="116">116</span>
<span id="117">117</span>
<span id="118">118</span>
<span id="119">119</span>
<span id="120">120</span>
<span id="121">121</span>
<span id="122">122</span>
<span id="123">123</span>
<span id="124">124</span>
<span id="125">125</span>
<span id="126">126</span>
<span id="127">127</span>
<span id="128">128</span>
<span id="129">129</span>
<span id="130">130</span>
<span id="131">131</span>
<span id="132">132</span>
<span id="133">133</span>
<span id="134">134</span>
<span id="135">135</span>
<span id="136">136</span>
<span id="137">137</span>
<span id="138">138</span>
<span id="139">139</span>
<span id="140">140</span>
<span id="141">141</span>
<span id="142">142</span>
<span id="143">143</span>
<span id="144">144</span>
<span id="145">145</span>
<span id="146">146</span>
<span id="147">147</span>
<span id="148">148</span>
<span id="149">149</span>
<span id="150">150</span>
<span id="151">151</span>
<span id="152">152</span>
<span id="153">153</span>
<span id="154">154</span>
<span id="155">155</span>
<span id="156">156</span>
<span id="157">157</span>
<span id="158">158</span>
<span id="159">159</span>
<span id="160">160</span>
<span id="161">161</span>
<span id="162">162</span>
<span id="163">163</span>
<span id="164">164</span>
<span id="165">165</span>
<span id="166">166</span>
<span id="167">167</span>
<span id="168">168</span>
<span id="169">169</span>
<span id="170">170</span>
<span id="171">171</span>
<span id="172">172</span>
<span id="173">173</span>
<span id="174">174</span>
<span id="175">175</span>
<span id="176">176</span>
<span id="177">177</span>
<span id="178">178</span>
<span id="179">179</span>
<span id="180">180</span>
<span id="181">181</span>
<span id="182">182</span>
<span id="183">183</span>
<span id="184">184</span>
<span id="185">185</span>
<span id="186">186</span>
<span id="187">187</span>
<span id="188">188</span>
<span id="189">189</span>
<span id="190">190</span>
<span id="191">191</span>
<span id="192">192</span>
<span id="193">193</span>
<span id="194">194</span>
<span id="195">195</span>
<span id="196">196</span>
<span id="197">197</span>
<span id="198">198</span>
<span id="199">199</span>
<span id="200">200</span>
<span id="201">201</span>
<span id="202">202</span>
<span id="203">203</span>
<span id="204">204</span>
<span id="205">205</span>
<span id="206">206</span>
<span id="207">207</span>
<span id="208">208</span>
<span id="209">209</span>
<span id="210">210</span>
<span id="211">211</span>
<span id="212">212</span>
<span id="213">213</span>
<span id="214">214</span>
<span id="215">215</span>
<span id="216">216</span>
<span id="217">217</span>
<span id="218">218</span>
<span id="219">219</span>
<span id="220">220</span>
<span id="221">221</span>
<span id="222">222</span>
<span id="223">223</span>
<span id="224">224</span>
<span id="225">225</span>
<span id="226">226</span>
<span id="227">227</span>
<span id="228">228</span>
<span id="229">229</span>
<span id="230">230</span>
<span id="231">231</span>
<span id="232">232</span>
<span id="233">233</span>
<span id="234">234</span>
<span id="235">235</span>
<span id="236">236</span>
<span id="237">237</span>
<span id="238">238</span>
<span id="239">239</span>
<span id="240">240</span>
<span id="241">241</span>
<span id="242">242</span>
<span id="243">243</span>
<span id="244">244</span>
<span id="245">245</span>
<span id="246">246</span>
<span id="247">247</span>
<span id="248">248</span>
<span id="249">249</span>
<span id="250">250</span>
<span id="251">251</span>
<span id="252">252</span>
<span id="253">253</span>
<span id="254">254</span>
<span id="255">255</span>
<span id="256">256</span>
<span id="257">257</span>
<span id="258">258</span>
<span id="259">259</span>
<span id="260">260</span>
<span id="261">261</span>
<span id="262">262</span>
<span id="263">263</span>
<span id="264">264</span>
<span id="265">265</span>
<span id="266">266</span>
<span id="267">267</span>
<span id="268">268</span>
<span id="269">269</span>
<span id="270">270</span>
<span id="271">271</span>
<span id="272">272</span>
<span id="273">273</span>
<span id="274">274</span>
<span id="275">275</span>
<span id="276">276</span>
<span id="277">277</span>
<span id="278">278</span>
<span id="279">279</span>
<span id="280">280</span>
<span id="281">281</span>
<span id="282">282</span>
<span id="283">283</span>
<span id="284">284</span>
<span id="285">285</span>
<span id="286">286</span>
<span id="287">287</span>
<span id="288">288</span>
<span id="289">289</span>
<span id="290">290</span>
<span id="291">291</span>
<span id="292">292</span>
<span id="293">293</span>
<span id="294">294</span>
<span id="295">295</span>
<span id="296">296</span>
<span id="297">297</span>
<span id="298">298</span>
<span id="299">299</span>
<span id="300">300</span>
<span id="301">301</span>
<span id="302">302</span>
<span id="303">303</span>
<span id="304">304</span>
<span id="305">305</span>
<span id="306">306</span>
<span id="307">307</span>
<span id="308">308</span>
<span id="309">309</span>
<span id="310">310</span>
<span id="311">311</span>
<span id="312">312</span>
<span id="313">313</span>
<span id="314">314</span>
<span id="315">315</span>
<span id="316">316</span>
<span id="317">317</span>
<span id="318">318</span>
<span id="319">319</span>
<span id="320">320</span>
<span id="321">321</span>
<span id="322">322</span>
<span id="323">323</span>
<span id="324">324</span>
<span id="325">325</span>
<span id="326">326</span>
<span id="327">327</span>
<span id="328">328</span>
<span id="329">329</span>
<span id="330">330</span>
<span id="331">331</span>
<span id="332">332</span>
<span id="333">333</span>
<span id="334">334</span>
<span id="335">335</span>
<span id="336">336</span>
<span id="337">337</span>
<span id="338">338</span>
<span id="339">339</span>
<span id="340">340</span>
<span id="341">341</span>
<span id="342">342</span>
<span id="343">343</span>
<span id="344">344</span>
<span id="345">345</span>
<span id="346">346</span>
<span id="347">347</span>
<span id="348">348</span>
<span id="349">349</span>
<span id="350">350</span>
<span id="351">351</span>
<span id="352">352</span>
<span id="353">353</span>
<span id="354">354</span>
<span id="355">355</span>
<span id="356">356</span>
<span id="357">357</span>
<span id="358">358</span>
<span id="359">359</span>
<span id="360">360</span>
<span id="361">361</span>
<span id="362">362</span>
<span id="363">363</span>
<span id="364">364</span>
<span id="365">365</span>
<span id="366">366</span>
<span id="367">367</span>
<span id="368">368</span>
<span id="369">369</span>
<span id="370">370</span>
<span id="371">371</span>
<span id="372">372</span>
<span id="373">373</span>
<span id="374">374</span>
<span id="375">375</span>
<span id="376">376</span>
<span id="377">377</span>
<span id="378">378</span>
<span id="379">379</span>
<span id="380">380</span>
<span id="381">381</span>
<span id="382">382</span>
<span id="383">383</span>
<span id="384">384</span>
<span id="385">385</span>
<span id="386">386</span>
<span id="387">387</span>
<span id="388">388</span>
<span id="389">389</span>
<span id="390">390</span>
<span id="391">391</span>
<span id="392">392</span>
<span id="393">393</span>
<span id="394">394</span>
<span id="395">395</span>
<span id="396">396</span>
<span id="397">397</span>
<span id="398">398</span>
<span id="399">399</span>
<span id="400">400</span>
<span id="401">401</span>
<span id="402">402</span>
<span id="403">403</span>
<span id="404">404</span>
<span id="405">405</span>
<span id="406">406</span>
<span id="407">407</span>
<span id="408">408</span>
<span id="409">409</span>
<span id="410">410</span>
<span id="411">411</span>
<span id="412">412</span>
<span id="413">413</span>
<span id="414">414</span>
<span id="415">415</span>
<span id="416">416</span>
<span id="417">417</span>
<span id="418">418</span>
<span id="419">419</span>
<span id="420">420</span>
<span id="421">421</span>
<span id="422">422</span>
<span id="423">423</span>
<span id="424">424</span>
<span id="425">425</span>
<span id="426">426</span>
<span id="427">427</span>
<span id="428">428</span>
<span id="429">429</span>
<span id="430">430</span>
<span id="431">431</span>
<span id="432">432</span>
<span id="433">433</span>
<span id="434">434</span>
<span id="435">435</span>
<span id="436">436</span>
<span id="437">437</span>
<span id="438">438</span>
<span id="439">439</span>
<span id="440">440</span>
<span id="441">441</span>
<span id="442">442</span>
<span id="443">443</span>
<span id="444">444</span>
<span id="445">445</span>
<span id="446">446</span>
<span id="447">447</span>
<span id="448">448</span>
<span id="449">449</span>
<span id="450">450</span>
<span id="451">451</span>
<span id="452">452</span>
<span id="453">453</span>
<span id="454">454</span>
<span id="455">455</span>
<span id="456">456</span>
</pre><pre class="rust"><code><span class="kw">use super</span>::{Token, TokenStream, Tokenizer};
<span class="kw">use </span><span class="kw">crate</span>::tokenizer::BoxTokenStream;
<span class="doccomment">/// Tokenize the text by splitting words into n-grams of the given size(s)
///
/// With this tokenizer, the `position` is always 0.
/// Beware however: in the presence of multiple values for the same field,
/// the position will be `POSITION_GAP * index of value`.
///
/// Example 1: `hello` would be tokenized as (min_gram: 2, max_gram: 3, prefix_only: false)
///
/// | Term | he | hel | el | ell | ll | llo | lo |
/// |----------|-----|-----|-----|-----|-----|-----|----|
/// | Position | 0 | 0 | 0 | 0 | 0 | 0 | 0 |
/// | Offsets | 0,2 | 0,3 | 1,3 | 1,4 | 2,4 | 2,5 | 3,5|
///
/// Example 2: `hello` would be tokenized as (min_gram: 2, max_gram: 5, prefix_only: **true**)
///
/// | Term | he | hel | hell | hello |
/// |----------|-----|-----|-------|-------|
/// | Position | 0 | 0 | 0 | 0 |
/// | Offsets | 0,2 | 0,3 | 0,4 | 0,5 |
///
/// Example 3: `hεllo` (non-ascii) would be tokenized as (min_gram: 2, max_gram: 5, prefix_only:
/// **true**)
///
/// | Term | hε | hεl | hεll | hεllo |
/// |----------|-----|-----|-------|-------|
/// | Position | 0 | 0 | 0 | 0 |
/// | Offsets | 0,3 | 0,4 | 0,5 | 0,6 |
///
/// # Example
///
/// ```rust
/// use tantivy::tokenizer::*;
///
/// let tokenizer = NgramTokenizer::new(2, 3, false);
/// let mut stream = tokenizer.token_stream(&quot;hello&quot;);
/// {
/// let token = stream.next().unwrap();
/// assert_eq!(token.text, &quot;he&quot;);
/// assert_eq!(token.offset_from, 0);
/// assert_eq!(token.offset_to, 2);
/// }
/// {
/// let token = stream.next().unwrap();
/// assert_eq!(token.text, &quot;hel&quot;);
/// assert_eq!(token.offset_from, 0);
/// assert_eq!(token.offset_to, 3);
/// }
/// {
/// let token = stream.next().unwrap();
/// assert_eq!(token.text, &quot;el&quot;);
/// assert_eq!(token.offset_from, 1);
/// assert_eq!(token.offset_to, 3);
/// }
/// {
/// let token = stream.next().unwrap();
/// assert_eq!(token.text, &quot;ell&quot;);
/// assert_eq!(token.offset_from, 1);
/// assert_eq!(token.offset_to, 4);
/// }
/// {
/// let token = stream.next().unwrap();
/// assert_eq!(token.text, &quot;ll&quot;);
/// assert_eq!(token.offset_from, 2);
/// assert_eq!(token.offset_to, 4);
/// }
/// {
/// let token = stream.next().unwrap();
/// assert_eq!(token.text, &quot;llo&quot;);
/// assert_eq!(token.offset_from, 2);
/// assert_eq!(token.offset_to, 5);
/// }
/// {
/// let token = stream.next().unwrap();
/// assert_eq!(token.text, &quot;lo&quot;);
/// assert_eq!(token.offset_from, 3);
/// assert_eq!(token.offset_to, 5);
/// }
/// assert!(stream.next().is_none());
/// ```
</span><span class="attribute">#[derive(Clone)]
</span><span class="kw">pub struct </span>NgramTokenizer {
<span class="doccomment">/// min size of the n-gram
</span>min_gram: usize,
<span class="doccomment">/// max size of the n-gram
</span>max_gram: usize,
<span class="doccomment">/// if true, will only parse the leading edge of the input
</span>prefix_only: bool,
}
<span class="kw">impl </span>NgramTokenizer {
<span class="doccomment">/// Configures a new Ngram tokenizer
</span><span class="kw">pub fn </span>new(min_gram: usize, max_gram: usize, prefix_only: bool) -&gt; NgramTokenizer {
<span class="macro">assert!</span>(min_gram &gt; <span class="number">0</span>, <span class="string">&quot;min_gram must be greater than 0&quot;</span>);
<span class="macro">assert!</span>(
min_gram &lt;= max_gram,
<span class="string">&quot;min_gram must not be greater than max_gram&quot;
</span>);
NgramTokenizer {
min_gram,
max_gram,
prefix_only,
}
}
<span class="doccomment">/// Create a `NGramTokenizer` which generates tokens for all inner ngrams.
///
/// This is as opposed to only prefix ngrams.
</span><span class="kw">pub fn </span>all_ngrams(min_gram: usize, max_gram: usize) -&gt; NgramTokenizer {
<span class="self">Self</span>::new(min_gram, max_gram, <span class="bool-val">false</span>)
}
<span class="doccomment">/// Create a `NGramTokenizer` which only generates tokens for the
/// prefix ngrams.
</span><span class="kw">pub fn </span>prefix_only(min_gram: usize, max_gram: usize) -&gt; NgramTokenizer {
<span class="self">Self</span>::new(min_gram, max_gram, <span class="bool-val">true</span>)
}
}
<span class="doccomment">/// TokenStream associated with the `NgramTokenizer`
</span><span class="kw">pub struct </span>NgramTokenStream&lt;<span class="lifetime">&#39;a</span>&gt; {
<span class="doccomment">/// parameters
</span>ngram_charidx_iterator: StutteringIterator&lt;CodepointFrontiers&lt;<span class="lifetime">&#39;a</span>&gt;&gt;,
<span class="doccomment">/// true if the NgramTokenStream is in prefix mode.
</span>prefix_only: bool,
<span class="doccomment">/// input
</span>text: <span class="kw-2">&amp;</span><span class="lifetime">&#39;a </span>str,
<span class="doccomment">/// output
</span>token: Token,
}
<span class="kw">impl </span>Tokenizer <span class="kw">for </span>NgramTokenizer {
<span class="kw">fn </span>token_stream&lt;<span class="lifetime">&#39;a</span>&gt;(<span class="kw-2">&amp;</span><span class="self">self</span>, text: <span class="kw-2">&amp;</span><span class="lifetime">&#39;a </span>str) -&gt; BoxTokenStream&lt;<span class="lifetime">&#39;a</span>&gt; {
From::from(NgramTokenStream {
ngram_charidx_iterator: StutteringIterator::new(
CodepointFrontiers::for_str(text),
<span class="self">self</span>.min_gram,
<span class="self">self</span>.max_gram,
),
prefix_only: <span class="self">self</span>.prefix_only,
text,
token: Token::default(),
})
}
}
<span class="kw">impl</span>&lt;<span class="lifetime">&#39;a</span>&gt; TokenStream <span class="kw">for </span>NgramTokenStream&lt;<span class="lifetime">&#39;a</span>&gt; {
<span class="kw">fn </span>advance(<span class="kw-2">&amp;mut </span><span class="self">self</span>) -&gt; bool {
<span class="kw">if let </span><span class="prelude-val">Some</span>((offset_from, offset_to)) = <span class="self">self</span>.ngram_charidx_iterator.next() {
<span class="kw">if </span><span class="self">self</span>.prefix_only &amp;&amp; offset_from &gt; <span class="number">0 </span>{
<span class="kw">return </span><span class="bool-val">false</span>;
}
<span class="self">self</span>.token.position = <span class="number">0</span>;
<span class="self">self</span>.token.offset_from = offset_from;
<span class="self">self</span>.token.offset_to = offset_to;
<span class="self">self</span>.token.text.clear();
<span class="self">self</span>.token.text.push_str(<span class="kw-2">&amp;</span><span class="self">self</span>.text[offset_from..offset_to]);
<span class="bool-val">true
</span>} <span class="kw">else </span>{
<span class="bool-val">false
</span>}
}
<span class="kw">fn </span>token(<span class="kw-2">&amp;</span><span class="self">self</span>) -&gt; <span class="kw-2">&amp;</span>Token {
<span class="kw-2">&amp;</span><span class="self">self</span>.token
}
<span class="kw">fn </span>token_mut(<span class="kw-2">&amp;mut </span><span class="self">self</span>) -&gt; <span class="kw-2">&amp;mut </span>Token {
<span class="kw-2">&amp;mut </span><span class="self">self</span>.token
}
}
<span class="doccomment">/// This iterator takes an underlying Iterator
/// and emits all of the pairs `(a,b)` such that
/// a and b are items emitted by the iterator at
/// an interval between `min_gram` and `max_gram`.
///
/// The elements are emitted in the order of appearance
/// of `a` first, `b` then.
///
/// See `test_stutterring_iterator` for an example of its
/// output.
</span><span class="kw">struct </span>StutteringIterator&lt;T&gt; {
underlying: T,
min_gram: usize,
max_gram: usize,
memory: Vec&lt;usize&gt;,
cursor: usize,
gram_len: usize,
}
<span class="kw">impl</span>&lt;T&gt; StutteringIterator&lt;T&gt;
<span class="kw">where </span>T: Iterator&lt;Item = usize&gt;
{
<span class="kw">pub fn </span>new(<span class="kw-2">mut </span>underlying: T, min_gram: usize, max_gram: usize) -&gt; StutteringIterator&lt;T&gt; {
<span class="macro">assert!</span>(min_gram &gt; <span class="number">0</span>);
<span class="kw">let </span>memory: Vec&lt;usize&gt; = (<span class="kw-2">&amp;mut </span>underlying).take(max_gram + <span class="number">1</span>).collect();
<span class="kw">if </span>memory.len() &lt;= min_gram {
<span class="comment">// returns an empty iterator
</span>StutteringIterator {
underlying,
min_gram: <span class="number">1</span>,
max_gram: <span class="number">0</span>,
memory,
cursor: <span class="number">0</span>,
gram_len: <span class="number">0</span>,
}
} <span class="kw">else </span>{
StutteringIterator {
underlying,
min_gram,
max_gram: memory.len() - <span class="number">1</span>,
memory,
cursor: <span class="number">0</span>,
gram_len: min_gram,
}
}
}
}
<span class="kw">impl</span>&lt;T&gt; Iterator <span class="kw">for </span>StutteringIterator&lt;T&gt;
<span class="kw">where </span>T: Iterator&lt;Item = usize&gt;
{
<span class="kw">type </span>Item = (usize, usize);
<span class="kw">fn </span>next(<span class="kw-2">&amp;mut </span><span class="self">self</span>) -&gt; <span class="prelude-ty">Option</span>&lt;(usize, usize)&gt; {
<span class="kw">if </span><span class="self">self</span>.gram_len &gt; <span class="self">self</span>.max_gram {
<span class="comment">// we have exhausted all options
// starting at `self.memory[self.cursor]`.
//
// Time to advance.
</span><span class="self">self</span>.gram_len = <span class="self">self</span>.min_gram;
<span class="kw">if let </span><span class="prelude-val">Some</span>(next_val) = <span class="self">self</span>.underlying.next() {
<span class="self">self</span>.memory[<span class="self">self</span>.cursor] = next_val;
} <span class="kw">else </span>{
<span class="self">self</span>.max_gram -= <span class="number">1</span>;
}
<span class="self">self</span>.cursor += <span class="number">1</span>;
<span class="kw">if </span><span class="self">self</span>.cursor &gt;= <span class="self">self</span>.memory.len() {
<span class="self">self</span>.cursor = <span class="number">0</span>;
}
}
<span class="kw">if </span><span class="self">self</span>.max_gram &lt; <span class="self">self</span>.min_gram {
<span class="kw">return </span><span class="prelude-val">None</span>;
}
<span class="kw">let </span>start = <span class="self">self</span>.memory[<span class="self">self</span>.cursor % <span class="self">self</span>.memory.len()];
<span class="kw">let </span>stop = <span class="self">self</span>.memory[(<span class="self">self</span>.cursor + <span class="self">self</span>.gram_len) % <span class="self">self</span>.memory.len()];
<span class="self">self</span>.gram_len += <span class="number">1</span>;
<span class="prelude-val">Some</span>((start, stop))
}
}
<span class="doccomment">/// Emits all of the offsets where a codepoint starts
/// or a codepoint ends.
///
/// By convention, we emit `[0]` for the empty string.
</span><span class="kw">struct </span>CodepointFrontiers&lt;<span class="lifetime">&#39;a</span>&gt; {
s: <span class="kw-2">&amp;</span><span class="lifetime">&#39;a </span>str,
next_el: <span class="prelude-ty">Option</span>&lt;usize&gt;,
}
<span class="kw">impl</span>&lt;<span class="lifetime">&#39;a</span>&gt; CodepointFrontiers&lt;<span class="lifetime">&#39;a</span>&gt; {
<span class="kw">fn </span>for_str(s: <span class="kw-2">&amp;</span><span class="lifetime">&#39;a </span>str) -&gt; <span class="self">Self </span>{
CodepointFrontiers {
s,
next_el: <span class="prelude-val">Some</span>(<span class="number">0</span>),
}
}
}
<span class="kw">impl</span>&lt;<span class="lifetime">&#39;a</span>&gt; Iterator <span class="kw">for </span>CodepointFrontiers&lt;<span class="lifetime">&#39;a</span>&gt; {
<span class="kw">type </span>Item = usize;
<span class="kw">fn </span>next(<span class="kw-2">&amp;mut </span><span class="self">self</span>) -&gt; <span class="prelude-ty">Option</span>&lt;usize&gt; {
<span class="self">self</span>.next_el.map(|offset| {
<span class="kw">if </span><span class="self">self</span>.s.is_empty() {
<span class="self">self</span>.next_el = <span class="prelude-val">None</span>;
} <span class="kw">else </span>{
<span class="kw">let </span>first_codepoint_width = utf8_codepoint_width(<span class="self">self</span>.s.as_bytes()[<span class="number">0</span>]);
<span class="self">self</span>.s = <span class="kw-2">&amp;</span><span class="self">self</span>.s[first_codepoint_width..];
<span class="self">self</span>.next_el = <span class="prelude-val">Some</span>(offset + first_codepoint_width);
}
offset
})
}
}
<span class="kw">const </span>CODEPOINT_UTF8_WIDTH: [u8; <span class="number">16</span>] = [<span class="number">1</span>, <span class="number">1</span>, <span class="number">1</span>, <span class="number">1</span>, <span class="number">1</span>, <span class="number">1</span>, <span class="number">1</span>, <span class="number">1</span>, <span class="number">2</span>, <span class="number">2</span>, <span class="number">2</span>, <span class="number">2</span>, <span class="number">2</span>, <span class="number">2</span>, <span class="number">3</span>, <span class="number">4</span>];
<span class="comment">// Number of bytes to encode a codepoint in UTF-8 given
// the first byte.
//
// To do that we count the number of higher significant bits set to `1`.
</span><span class="kw">fn </span>utf8_codepoint_width(b: u8) -&gt; usize {
<span class="kw">let </span>higher_4_bits = (b <span class="kw">as </span>usize) &gt;&gt; <span class="number">4</span>;
CODEPOINT_UTF8_WIDTH[higher_4_bits] <span class="kw">as </span>usize
}
<span class="attribute">#[cfg(test)]
</span><span class="kw">mod </span>tests {
<span class="kw">use super</span>::{utf8_codepoint_width, CodepointFrontiers, NgramTokenizer, StutteringIterator};
<span class="kw">use </span><span class="kw">crate</span>::tokenizer::tests::assert_token;
<span class="kw">use </span><span class="kw">crate</span>::tokenizer::tokenizer::Tokenizer;
<span class="kw">use </span><span class="kw">crate</span>::tokenizer::{BoxTokenStream, Token};
<span class="kw">fn </span>test_helper(<span class="kw-2">mut </span>tokenizer: BoxTokenStream) -&gt; Vec&lt;Token&gt; {
<span class="kw">let </span><span class="kw-2">mut </span>tokens: Vec&lt;Token&gt; = <span class="macro">vec!</span>[];
tokenizer.process(<span class="kw-2">&amp;mut </span>|token: <span class="kw-2">&amp;</span>Token| tokens.push(token.clone()));
tokens
}
<span class="attribute">#[test]
</span><span class="kw">fn </span>test_utf8_codepoint_width() {
<span class="comment">// 0xxx
</span><span class="kw">for </span>i <span class="kw">in </span><span class="number">0</span>..<span class="number">128 </span>{
<span class="macro">assert_eq!</span>(utf8_codepoint_width(i), <span class="number">1</span>);
}
<span class="comment">// 110xx
</span><span class="kw">for </span>i <span class="kw">in </span>(<span class="number">128 </span>| <span class="number">64</span>)..(<span class="number">128 </span>| <span class="number">64 </span>| <span class="number">32</span>) {
<span class="macro">assert_eq!</span>(utf8_codepoint_width(i), <span class="number">2</span>);
}
<span class="comment">// 1110xx
</span><span class="kw">for </span>i <span class="kw">in </span>(<span class="number">128 </span>| <span class="number">64 </span>| <span class="number">32</span>)..(<span class="number">128 </span>| <span class="number">64 </span>| <span class="number">32 </span>| <span class="number">16</span>) {
<span class="macro">assert_eq!</span>(utf8_codepoint_width(i), <span class="number">3</span>);
}
<span class="comment">// 1111xx
</span><span class="kw">for </span>i <span class="kw">in </span>(<span class="number">128 </span>| <span class="number">64 </span>| <span class="number">32 </span>| <span class="number">16</span>)..<span class="number">256 </span>{
<span class="macro">assert_eq!</span>(utf8_codepoint_width(i <span class="kw">as </span>u8), <span class="number">4</span>);
}
}
<span class="attribute">#[test]
</span><span class="kw">fn </span>test_codepoint_frontiers() {
<span class="macro">assert_eq!</span>(CodepointFrontiers::for_str(<span class="string">&quot;&quot;</span>).collect::&lt;Vec&lt;<span class="kw">_</span>&gt;&gt;(), <span class="macro">vec!</span>[<span class="number">0</span>]);
<span class="macro">assert_eq!</span>(
CodepointFrontiers::for_str(<span class="string">&quot;abcd&quot;</span>).collect::&lt;Vec&lt;<span class="kw">_</span>&gt;&gt;(),
<span class="macro">vec!</span>[<span class="number">0</span>, <span class="number">1</span>, <span class="number">2</span>, <span class="number">3</span>, <span class="number">4</span>]
);
<span class="macro">assert_eq!</span>(
CodepointFrontiers::for_str(<span class="string">&quot;aあ&quot;</span>).collect::&lt;Vec&lt;<span class="kw">_</span>&gt;&gt;(),
<span class="macro">vec!</span>[<span class="number">0</span>, <span class="number">1</span>, <span class="number">4</span>]
);
}
<span class="attribute">#[test]
</span><span class="kw">fn </span>test_ngram_tokenizer_1_2_false() {
<span class="kw">let </span>tokens = test_helper(NgramTokenizer::all_ngrams(<span class="number">1</span>, <span class="number">2</span>).token_stream(<span class="string">&quot;hello&quot;</span>));
<span class="macro">assert_eq!</span>(tokens.len(), <span class="number">9</span>);
assert_token(<span class="kw-2">&amp;</span>tokens[<span class="number">0</span>], <span class="number">0</span>, <span class="string">&quot;h&quot;</span>, <span class="number">0</span>, <span class="number">1</span>);
assert_token(<span class="kw-2">&amp;</span>tokens[<span class="number">1</span>], <span class="number">0</span>, <span class="string">&quot;he&quot;</span>, <span class="number">0</span>, <span class="number">2</span>);
assert_token(<span class="kw-2">&amp;</span>tokens[<span class="number">2</span>], <span class="number">0</span>, <span class="string">&quot;e&quot;</span>, <span class="number">1</span>, <span class="number">2</span>);
assert_token(<span class="kw-2">&amp;</span>tokens[<span class="number">3</span>], <span class="number">0</span>, <span class="string">&quot;el&quot;</span>, <span class="number">1</span>, <span class="number">3</span>);
assert_token(<span class="kw-2">&amp;</span>tokens[<span class="number">4</span>], <span class="number">0</span>, <span class="string">&quot;l&quot;</span>, <span class="number">2</span>, <span class="number">3</span>);
assert_token(<span class="kw-2">&amp;</span>tokens[<span class="number">5</span>], <span class="number">0</span>, <span class="string">&quot;ll&quot;</span>, <span class="number">2</span>, <span class="number">4</span>);
assert_token(<span class="kw-2">&amp;</span>tokens[<span class="number">6</span>], <span class="number">0</span>, <span class="string">&quot;l&quot;</span>, <span class="number">3</span>, <span class="number">4</span>);
assert_token(<span class="kw-2">&amp;</span>tokens[<span class="number">7</span>], <span class="number">0</span>, <span class="string">&quot;lo&quot;</span>, <span class="number">3</span>, <span class="number">5</span>);
assert_token(<span class="kw-2">&amp;</span>tokens[<span class="number">8</span>], <span class="number">0</span>, <span class="string">&quot;o&quot;</span>, <span class="number">4</span>, <span class="number">5</span>);
}
<span class="attribute">#[test]
</span><span class="kw">fn </span>test_ngram_tokenizer_min_max_equal() {
<span class="kw">let </span>tokens = test_helper(NgramTokenizer::all_ngrams(<span class="number">3</span>, <span class="number">3</span>).token_stream(<span class="string">&quot;hello&quot;</span>));
<span class="macro">assert_eq!</span>(tokens.len(), <span class="number">3</span>);
assert_token(<span class="kw-2">&amp;</span>tokens[<span class="number">0</span>], <span class="number">0</span>, <span class="string">&quot;hel&quot;</span>, <span class="number">0</span>, <span class="number">3</span>);
assert_token(<span class="kw-2">&amp;</span>tokens[<span class="number">1</span>], <span class="number">0</span>, <span class="string">&quot;ell&quot;</span>, <span class="number">1</span>, <span class="number">4</span>);
assert_token(<span class="kw-2">&amp;</span>tokens[<span class="number">2</span>], <span class="number">0</span>, <span class="string">&quot;llo&quot;</span>, <span class="number">2</span>, <span class="number">5</span>);
}
<span class="attribute">#[test]
</span><span class="kw">fn </span>test_ngram_tokenizer_2_5_prefix() {
<span class="kw">let </span>tokens = test_helper(NgramTokenizer::prefix_only(<span class="number">2</span>, <span class="number">5</span>).token_stream(<span class="string">&quot;frankenstein&quot;</span>));
<span class="macro">assert_eq!</span>(tokens.len(), <span class="number">4</span>);
assert_token(<span class="kw-2">&amp;</span>tokens[<span class="number">0</span>], <span class="number">0</span>, <span class="string">&quot;fr&quot;</span>, <span class="number">0</span>, <span class="number">2</span>);
assert_token(<span class="kw-2">&amp;</span>tokens[<span class="number">1</span>], <span class="number">0</span>, <span class="string">&quot;fra&quot;</span>, <span class="number">0</span>, <span class="number">3</span>);
assert_token(<span class="kw-2">&amp;</span>tokens[<span class="number">2</span>], <span class="number">0</span>, <span class="string">&quot;fran&quot;</span>, <span class="number">0</span>, <span class="number">4</span>);
assert_token(<span class="kw-2">&amp;</span>tokens[<span class="number">3</span>], <span class="number">0</span>, <span class="string">&quot;frank&quot;</span>, <span class="number">0</span>, <span class="number">5</span>);
}
<span class="attribute">#[test]
</span><span class="kw">fn </span>test_ngram_non_ascii_1_2() {
<span class="kw">let </span>tokens = test_helper(NgramTokenizer::all_ngrams(<span class="number">1</span>, <span class="number">2</span>).token_stream(<span class="string">&quot;hεllo&quot;</span>));
<span class="macro">assert_eq!</span>(tokens.len(), <span class="number">9</span>);
assert_token(<span class="kw-2">&amp;</span>tokens[<span class="number">0</span>], <span class="number">0</span>, <span class="string">&quot;h&quot;</span>, <span class="number">0</span>, <span class="number">1</span>);
assert_token(<span class="kw-2">&amp;</span>tokens[<span class="number">1</span>], <span class="number">0</span>, <span class="string">&quot;hε&quot;</span>, <span class="number">0</span>, <span class="number">3</span>);
assert_token(<span class="kw-2">&amp;</span>tokens[<span class="number">2</span>], <span class="number">0</span>, <span class="string">&quot;ε&quot;</span>, <span class="number">1</span>, <span class="number">3</span>);
assert_token(<span class="kw-2">&amp;</span>tokens[<span class="number">3</span>], <span class="number">0</span>, <span class="string">&quot;εl&quot;</span>, <span class="number">1</span>, <span class="number">4</span>);
assert_token(<span class="kw-2">&amp;</span>tokens[<span class="number">4</span>], <span class="number">0</span>, <span class="string">&quot;l&quot;</span>, <span class="number">3</span>, <span class="number">4</span>);
assert_token(<span class="kw-2">&amp;</span>tokens[<span class="number">5</span>], <span class="number">0</span>, <span class="string">&quot;ll&quot;</span>, <span class="number">3</span>, <span class="number">5</span>);
assert_token(<span class="kw-2">&amp;</span>tokens[<span class="number">6</span>], <span class="number">0</span>, <span class="string">&quot;l&quot;</span>, <span class="number">4</span>, <span class="number">5</span>);
assert_token(<span class="kw-2">&amp;</span>tokens[<span class="number">7</span>], <span class="number">0</span>, <span class="string">&quot;lo&quot;</span>, <span class="number">4</span>, <span class="number">6</span>);
assert_token(<span class="kw-2">&amp;</span>tokens[<span class="number">8</span>], <span class="number">0</span>, <span class="string">&quot;o&quot;</span>, <span class="number">5</span>, <span class="number">6</span>);
}
<span class="attribute">#[test]
</span><span class="kw">fn </span>test_ngram_non_ascii_2_5_prefix() {
<span class="kw">let </span>tokens = test_helper(NgramTokenizer::prefix_only(<span class="number">2</span>, <span class="number">5</span>).token_stream(<span class="string">&quot;hεllo&quot;</span>));
<span class="macro">assert_eq!</span>(tokens.len(), <span class="number">4</span>);
assert_token(<span class="kw-2">&amp;</span>tokens[<span class="number">0</span>], <span class="number">0</span>, <span class="string">&quot;hε&quot;</span>, <span class="number">0</span>, <span class="number">3</span>);
assert_token(<span class="kw-2">&amp;</span>tokens[<span class="number">1</span>], <span class="number">0</span>, <span class="string">&quot;hεl&quot;</span>, <span class="number">0</span>, <span class="number">4</span>);
assert_token(<span class="kw-2">&amp;</span>tokens[<span class="number">2</span>], <span class="number">0</span>, <span class="string">&quot;hεll&quot;</span>, <span class="number">0</span>, <span class="number">5</span>);
assert_token(<span class="kw-2">&amp;</span>tokens[<span class="number">3</span>], <span class="number">0</span>, <span class="string">&quot;hεllo&quot;</span>, <span class="number">0</span>, <span class="number">6</span>);
}
<span class="attribute">#[test]
</span><span class="kw">fn </span>test_ngram_empty() {
<span class="kw">let </span>tokens = test_helper(NgramTokenizer::all_ngrams(<span class="number">1</span>, <span class="number">5</span>).token_stream(<span class="string">&quot;&quot;</span>));
<span class="macro">assert!</span>(tokens.is_empty());
<span class="kw">let </span>tokens = test_helper(NgramTokenizer::all_ngrams(<span class="number">2</span>, <span class="number">5</span>).token_stream(<span class="string">&quot;&quot;</span>));
<span class="macro">assert!</span>(tokens.is_empty());
}
<span class="attribute">#[test]
#[should_panic(expected = <span class="string">&quot;min_gram must be greater than 0&quot;</span>)]
</span><span class="kw">fn </span>test_ngram_min_max_interval_empty() {
test_helper(NgramTokenizer::all_ngrams(<span class="number">0</span>, <span class="number">2</span>).token_stream(<span class="string">&quot;hellossss&quot;</span>));
}
<span class="attribute">#[test]
#[should_panic(expected = <span class="string">&quot;min_gram must not be greater than max_gram&quot;</span>)]
</span><span class="kw">fn </span>test_invalid_interval_should_panic_if_smaller() {
NgramTokenizer::all_ngrams(<span class="number">2</span>, <span class="number">1</span>);
}
<span class="attribute">#[test]
</span><span class="kw">fn </span>test_stutterring_iterator_empty() {
<span class="kw">let </span>rg: Vec&lt;usize&gt; = <span class="macro">vec!</span>[<span class="number">0</span>];
<span class="kw">let </span><span class="kw-2">mut </span>it = StutteringIterator::new(rg.into_iter(), <span class="number">1</span>, <span class="number">2</span>);
<span class="macro">assert_eq!</span>(it.next(), <span class="prelude-val">None</span>);
}
<span class="attribute">#[test]
</span><span class="kw">fn </span>test_stutterring_iterator() {
<span class="kw">let </span><span class="kw-2">mut </span>it = StutteringIterator::new(<span class="number">0</span>..<span class="number">10</span>, <span class="number">1</span>, <span class="number">2</span>);
<span class="macro">assert_eq!</span>(it.next(), <span class="prelude-val">Some</span>((<span class="number">0</span>, <span class="number">1</span>)));
<span class="macro">assert_eq!</span>(it.next(), <span class="prelude-val">Some</span>((<span class="number">0</span>, <span class="number">2</span>)));
<span class="macro">assert_eq!</span>(it.next(), <span class="prelude-val">Some</span>((<span class="number">1</span>, <span class="number">2</span>)));
<span class="macro">assert_eq!</span>(it.next(), <span class="prelude-val">Some</span>((<span class="number">1</span>, <span class="number">3</span>)));
<span class="macro">assert_eq!</span>(it.next(), <span class="prelude-val">Some</span>((<span class="number">2</span>, <span class="number">3</span>)));
<span class="macro">assert_eq!</span>(it.next(), <span class="prelude-val">Some</span>((<span class="number">2</span>, <span class="number">4</span>)));
<span class="macro">assert_eq!</span>(it.next(), <span class="prelude-val">Some</span>((<span class="number">3</span>, <span class="number">4</span>)));
<span class="macro">assert_eq!</span>(it.next(), <span class="prelude-val">Some</span>((<span class="number">3</span>, <span class="number">5</span>)));
<span class="macro">assert_eq!</span>(it.next(), <span class="prelude-val">Some</span>((<span class="number">4</span>, <span class="number">5</span>)));
<span class="macro">assert_eq!</span>(it.next(), <span class="prelude-val">Some</span>((<span class="number">4</span>, <span class="number">6</span>)));
<span class="macro">assert_eq!</span>(it.next(), <span class="prelude-val">Some</span>((<span class="number">5</span>, <span class="number">6</span>)));
<span class="macro">assert_eq!</span>(it.next(), <span class="prelude-val">Some</span>((<span class="number">5</span>, <span class="number">7</span>)));
<span class="macro">assert_eq!</span>(it.next(), <span class="prelude-val">Some</span>((<span class="number">6</span>, <span class="number">7</span>)));
<span class="macro">assert_eq!</span>(it.next(), <span class="prelude-val">Some</span>((<span class="number">6</span>, <span class="number">8</span>)));
<span class="macro">assert_eq!</span>(it.next(), <span class="prelude-val">Some</span>((<span class="number">7</span>, <span class="number">8</span>)));
<span class="macro">assert_eq!</span>(it.next(), <span class="prelude-val">Some</span>((<span class="number">7</span>, <span class="number">9</span>)));
<span class="macro">assert_eq!</span>(it.next(), <span class="prelude-val">Some</span>((<span class="number">8</span>, <span class="number">9</span>)));
<span class="macro">assert_eq!</span>(it.next(), <span class="prelude-val">None</span>);
}
}
</code></pre></div>
</section></div></main><div id="rustdoc-vars" data-root-path="../../../" data-current-crate="tantivy" data-themes="ayu,dark,light" data-resource-suffix="" data-rustdoc-version="1.66.0-nightly (5c8bff74b 2022-10-21)" ></div></body></html>