| <!DOCTYPE html><html lang="en"><head><meta charset="utf-8"><meta name="viewport" content="width=device-width, initial-scale=1.0"><meta name="generator" content="rustdoc"><meta name="description" content="Source of the Rust file `/root/.cargo/git/checkouts/tantivy-65d0bbbddbbd5d02/433372d/src/tokenizer/simple_tokenizer.rs`."><meta name="keywords" content="rust, rustlang, rust-lang"><title>simple_tokenizer.rs - source</title><link rel="preload" as="font" type="font/woff2" crossorigin href="../../../SourceSerif4-Regular.ttf.woff2"><link rel="preload" as="font" type="font/woff2" crossorigin href="../../../FiraSans-Regular.woff2"><link rel="preload" as="font" type="font/woff2" crossorigin href="../../../FiraSans-Medium.woff2"><link rel="preload" as="font" type="font/woff2" crossorigin href="../../../SourceCodePro-Regular.ttf.woff2"><link rel="preload" as="font" type="font/woff2" crossorigin href="../../../SourceSerif4-Bold.ttf.woff2"><link rel="preload" as="font" type="font/woff2" crossorigin href="../../../SourceCodePro-Semibold.ttf.woff2"><link rel="stylesheet" href="../../../normalize.css"><link rel="stylesheet" href="../../../rustdoc.css" id="mainThemeStyle"><link rel="stylesheet" href="../../../ayu.css" disabled><link rel="stylesheet" href="../../../dark.css" disabled><link rel="stylesheet" href="../../../light.css" id="themeStyle"><script id="default-settings" ></script><script src="../../../storage.js"></script><script defer src="../../../source-script.js"></script><script defer src="../../../source-files.js"></script><script defer src="../../../main.js"></script><noscript><link rel="stylesheet" href="../../../noscript.css"></noscript><link rel="alternate icon" type="image/png" href="../../../favicon-16x16.png"><link rel="alternate icon" type="image/png" href="../../../favicon-32x32.png"><link rel="icon" type="image/svg+xml" href="../../../favicon.svg"></head><body class="rustdoc source"><!--[if lte IE 11]><div class="warning">This old browser is unsupported 
and will most likely display funky things.</div><![endif]--><nav class="sidebar"><a class="sidebar-logo" href="../../../tantivy/index.html"><div class="logo-container"> |
| <img src="https://fulmicoton.com/tantivy-logo/tantivy-logo.png" alt="logo"></div></a></nav><main><div class="width-limiter"><nav class="sub"><a class="sub-logo-container" href="../../../tantivy/index.html"> |
| <img src="https://fulmicoton.com/tantivy-logo/tantivy-logo.png" alt="logo"></a><form class="search-form"><div class="search-container"><span></span><input class="search-input" name="search" autocomplete="off" spellcheck="false" placeholder="Click or press ‘S’ to search, ‘?’ for more options…" type="search"><div id="help-button" title="help" tabindex="-1"><a href="../../../help.html">?</a></div><div id="settings-menu" tabindex="-1"><a href="../../../settings.html" title="settings"><img width="22" height="22" alt="Change settings" src="../../../wheel.svg"></a></div></div></form></nav><section id="main-content" class="content"><div class="example-wrap"><pre class="src-line-numbers"><span id="1">1</span> |
| <span id="2">2</span> |
| <span id="3">3</span> |
| <span id="4">4</span> |
| <span id="5">5</span> |
| <span id="6">6</span> |
| <span id="7">7</span> |
| <span id="8">8</span> |
| <span id="9">9</span> |
| <span id="10">10</span> |
| <span id="11">11</span> |
| <span id="12">12</span> |
| <span id="13">13</span> |
| <span id="14">14</span> |
| <span id="15">15</span> |
| <span id="16">16</span> |
| <span id="17">17</span> |
| <span id="18">18</span> |
| <span id="19">19</span> |
| <span id="20">20</span> |
| <span id="21">21</span> |
| <span id="22">22</span> |
| <span id="23">23</span> |
| <span id="24">24</span> |
| <span id="25">25</span> |
| <span id="26">26</span> |
| <span id="27">27</span> |
| <span id="28">28</span> |
| <span id="29">29</span> |
| <span id="30">30</span> |
| <span id="31">31</span> |
| <span id="32">32</span> |
| <span id="33">33</span> |
| <span id="34">34</span> |
| <span id="35">35</span> |
| <span id="36">36</span> |
| <span id="37">37</span> |
| <span id="38">38</span> |
| <span id="39">39</span> |
| <span id="40">40</span> |
| <span id="41">41</span> |
| <span id="42">42</span> |
| <span id="43">43</span> |
| <span id="44">44</span> |
| <span id="45">45</span> |
| <span id="46">46</span> |
| <span id="47">47</span> |
| <span id="48">48</span> |
| <span id="49">49</span> |
| <span id="50">50</span> |
| <span id="51">51</span> |
| <span id="52">52</span> |
| <span id="53">53</span> |
| <span id="54">54</span> |
| <span id="55">55</span> |
| <span id="56">56</span> |
| <span id="57">57</span> |
| <span id="58">58</span> |
| <span id="59">59</span> |
| <span id="60">60</span> |
| <span id="61">61</span> |
| <span id="62">62</span> |
| <span id="63">63</span> |
| <span id="64">64</span> |
| <span id="65">65</span> |
| <span id="66">66</span> |
| <span id="67">67</span> |
| <span id="68">68</span> |
| <span id="69">69</span> |
| <span id="70">70</span> |
| <span id="71">71</span> |
| <span id="72">72</span> |
| <span id="73">73</span> |
| <span id="74">74</span> |
| <span id="75">75</span> |
| <span id="76">76</span> |
| <span id="77">77</span> |
| <span id="78">78</span> |
| <span id="79">79</span> |
| <span id="80">80</span> |
| <span id="81">81</span> |
| <span id="82">82</span> |
| <span id="83">83</span> |
| <span id="84">84</span> |
| <span id="85">85</span> |
| <span id="86">86</span> |
| </pre><pre class="rust"><code><span class="kw">use </span>std::str::CharIndices; |
| |
| <span class="kw">use super</span>::{BoxTokenStream, Token, TokenStream, Tokenizer}; |
| |
| <span class="doccomment">/// Tokenize the text by splitting on whitespaces and punctuation. |
| </span><span class="attribute">#[derive(Clone)] |
| </span><span class="kw">pub struct </span>SimpleTokenizer; |
| |
| <span class="kw">pub struct </span>SimpleTokenStream<<span class="lifetime">'a</span>> { |
| text: <span class="kw-2">&</span><span class="lifetime">'a </span>str, |
| chars: CharIndices<<span class="lifetime">'a</span>>, |
| token: Token, |
| } |
| |
| <span class="kw">impl </span>Tokenizer <span class="kw">for </span>SimpleTokenizer { |
| <span class="kw">fn </span>token_stream<<span class="lifetime">'a</span>>(<span class="kw-2">&</span><span class="self">self</span>, text: <span class="kw-2">&</span><span class="lifetime">'a </span>str) -> BoxTokenStream<<span class="lifetime">'a</span>> { |
| BoxTokenStream::from(SimpleTokenStream { |
| text, |
| chars: text.char_indices(), |
| token: Token::default(), |
| }) |
| } |
| } |
| |
| <span class="kw">impl</span><<span class="lifetime">'a</span>> SimpleTokenStream<<span class="lifetime">'a</span>> { |
| <span class="comment">// search for the end of the current token. |
| </span><span class="kw">fn </span>search_token_end(<span class="kw-2">&mut </span><span class="self">self</span>) -> usize { |
| (<span class="kw-2">&mut </span><span class="self">self</span>.chars) |
| .filter(|<span class="kw-2">&</span>(<span class="kw">_</span>, <span class="kw-2">ref </span>c)| !c.is_alphanumeric()) |
| .map(|(offset, <span class="kw">_</span>)| offset) |
| .next() |
| .unwrap_or(<span class="self">self</span>.text.len()) |
| } |
| } |
| |
| <span class="kw">impl</span><<span class="lifetime">'a</span>> TokenStream <span class="kw">for </span>SimpleTokenStream<<span class="lifetime">'a</span>> { |
| <span class="kw">fn </span>advance(<span class="kw-2">&mut </span><span class="self">self</span>) -> bool { |
| <span class="self">self</span>.token.text.clear(); |
| <span class="self">self</span>.token.position = <span class="self">self</span>.token.position.wrapping_add(<span class="number">1</span>); |
| <span class="kw">while let </span><span class="prelude-val">Some</span>((offset_from, c)) = <span class="self">self</span>.chars.next() { |
| <span class="kw">if </span>c.is_alphanumeric() { |
| <span class="kw">let </span>offset_to = <span class="self">self</span>.search_token_end(); |
| <span class="self">self</span>.token.offset_from = offset_from; |
| <span class="self">self</span>.token.offset_to = offset_to; |
| <span class="self">self</span>.token.text.push_str(<span class="kw-2">&</span><span class="self">self</span>.text[offset_from..offset_to]); |
| <span class="kw">return </span><span class="bool-val">true</span>; |
| } |
| } |
| <span class="bool-val">false |
| </span>} |
| |
| <span class="kw">fn </span>token(<span class="kw-2">&</span><span class="self">self</span>) -> <span class="kw-2">&</span>Token { |
| <span class="kw-2">&</span><span class="self">self</span>.token |
| } |
| |
| <span class="kw">fn </span>token_mut(<span class="kw-2">&mut </span><span class="self">self</span>) -> <span class="kw-2">&mut </span>Token { |
| <span class="kw-2">&mut </span><span class="self">self</span>.token |
| } |
| } |
| |
| <span class="attribute">#[cfg(test)] |
| </span><span class="kw">mod </span>tests { |
| <span class="kw">use </span><span class="kw">crate</span>::tokenizer::tests::assert_token; |
| <span class="kw">use </span><span class="kw">crate</span>::tokenizer::{SimpleTokenizer, TextAnalyzer, Token}; |
| |
| <span class="attribute">#[test] |
| </span><span class="kw">fn </span>test_simple_tokenizer() { |
| <span class="kw">let </span>tokens = token_stream_helper(<span class="string">"Hello, happy tax payer!"</span>); |
| <span class="macro">assert_eq!</span>(tokens.len(), <span class="number">4</span>); |
| assert_token(<span class="kw-2">&</span>tokens[<span class="number">0</span>], <span class="number">0</span>, <span class="string">"Hello"</span>, <span class="number">0</span>, <span class="number">5</span>); |
| assert_token(<span class="kw-2">&</span>tokens[<span class="number">1</span>], <span class="number">1</span>, <span class="string">"happy"</span>, <span class="number">7</span>, <span class="number">12</span>); |
| assert_token(<span class="kw-2">&</span>tokens[<span class="number">2</span>], <span class="number">2</span>, <span class="string">"tax"</span>, <span class="number">13</span>, <span class="number">16</span>); |
| assert_token(<span class="kw-2">&</span>tokens[<span class="number">3</span>], <span class="number">3</span>, <span class="string">"payer"</span>, <span class="number">17</span>, <span class="number">22</span>); |
| } |
| |
| <span class="kw">fn </span>token_stream_helper(text: <span class="kw-2">&</span>str) -> Vec<Token> { |
| <span class="kw">let </span>a = TextAnalyzer::from(SimpleTokenizer); |
| <span class="kw">let </span><span class="kw-2">mut </span>token_stream = a.token_stream(text); |
| <span class="kw">let </span><span class="kw-2">mut </span>tokens: Vec<Token> = <span class="macro">vec!</span>[]; |
| <span class="kw">let </span><span class="kw-2">mut </span>add_token = |token: <span class="kw-2">&</span>Token| { |
| tokens.push(token.clone()); |
| }; |
| token_stream.process(<span class="kw-2">&mut </span>add_token); |
| tokens |
| } |
| } |
| </code></pre></div> |
| </section></div></main><div id="rustdoc-vars" data-root-path="../../../" data-current-crate="tantivy" data-themes="ayu,dark,light" data-resource-suffix="" data-rustdoc-version="1.66.0-nightly (5c8bff74b 2022-10-21)" ></div></body></html> |