Skip to content

Commit

Permalink
Update dependencies (#77)
Browse files Browse the repository at this point in the history
  • Loading branch information
mosuka authored Jun 18, 2023
1 parent e7711b9 commit 3d779c4
Show file tree
Hide file tree
Showing 3 changed files with 162 additions and 180 deletions.
12 changes: 6 additions & 6 deletions Cargo.toml
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
[package]
name = "lindera-tantivy"
version = "0.24.0"
version = "0.25.0"
edition = "2021"
description = "Lindera Tokenizer for Tantivy."
documentation = "https://docs.rs/lindera-tantivy"
Expand All @@ -23,14 +23,14 @@ ko-dic-compress = ["lindera-tokenizer/ko-dic-compress"]
cc-cedict-compress = ["lindera-tokenizer/cc-cedict-compress"]

[dependencies]
tantivy = "0.19.2"
tantivy = "0.20.2"

lindera-core = "0.24.0"
lindera-dictionary = "0.24.0"
lindera-tokenizer = "0.24.0"
lindera-core = "0.25.0"
lindera-dictionary = "0.25.0"
lindera-tokenizer = "0.25.0"

[dev-dependencies]
criterion = { version = "0.4.0", features = ["html_reports"] }
criterion = { version = "0.5.1", features = ["html_reports"] }

[[bench]]
name = "bench"
Expand Down
35 changes: 15 additions & 20 deletions src/stream.rs
Original file line number Diff line number Diff line change
@@ -1,30 +1,25 @@
use std::collections::VecDeque;

use tantivy::tokenizer::{Token, TokenStream};

pub struct LinderaTokenStream {
tokens: VecDeque<Token>,
token: Token,
}
use lindera_tokenizer::token::Token as LToken;

impl LinderaTokenStream {
pub fn new(tokens: VecDeque<Token>) -> Self {
Self {
tokens,
token: Default::default(),
}
}
/// Token stream produced by the Lindera tokenizer for Tantivy.
///
/// Bridges Lindera's own token type (`LToken`) to Tantivy's `TokenStream`
/// trait: `advance` pops Lindera tokens from `tokens` and copies their text
/// and offsets into the caller-owned Tantivy `token`.
pub struct LinderaTokenStream<'a> {
// Lindera tokens still to be emitted, consumed front-to-back by `advance`.
pub tokens: Vec<LToken<'a>>,
// Caller-owned Tantivy token, overwritten on each `advance` call.
// NOTE(review): presumably owned by the enclosing tokenizer — confirm
// against the caller, which is outside this view.
pub token: &'a mut Token,
}

impl TokenStream for LinderaTokenStream {
impl<'a> TokenStream for LinderaTokenStream<'a> {
fn advance(&mut self) -> bool {
match self.tokens.pop_front() {
Some(token) => {
self.token = token;
true
}
None => false,
if self.tokens.is_empty() {
return false;
}
let token = self.tokens.remove(0);
self.token.text = token.text.to_string();
self.token.offset_from = token.byte_start;
self.token.offset_to = token.byte_end;
self.token.position = token.position;
self.token.position_length = token.position_length;

true
}

fn token(&self) -> &Token {
Expand Down
Loading

0 comments on commit 3d779c4

Please sign in to comment.