diff --git a/doc/tokenizer.graphml b/doc/stream.graphml similarity index 100% rename from doc/tokenizer.graphml rename to doc/stream.graphml diff --git a/doc/tokenizer.png b/doc/stream.png similarity index 100% rename from doc/tokenizer.png rename to doc/stream.png diff --git a/src/lib.rs b/src/lib.rs index 959b604..c5b4340 100644 --- a/src/lib.rs +++ b/src/lib.rs @@ -12,7 +12,6 @@ extern crate rental; pub mod util; pub mod model; -pub mod logic; #[cfg(feature = "gtk")] pub mod view; diff --git a/src/logic/mod.rs b/src/logic/mod.rs deleted file mode 100644 index d9b8fd1..0000000 --- a/src/logic/mod.rs +++ /dev/null @@ -1 +0,0 @@ -pub mod tokenizer; diff --git a/src/model/document.rs b/src/model/document.rs index 0f41128..462c2da 100644 --- a/src/model/document.rs +++ b/src/model/document.rs @@ -110,7 +110,7 @@ impl Document { pub fn load_from_testing_structure>(path: P) -> Result { let xml = std::fs::read_to_string(path)?; let xml = roxmltree::Document::parse(&xml)?; - let tc = crate::logic::tokenizer::xml::Testcase::from_xml(&xml); + let tc = crate::model::listing::stream::xml::Testcase::from_xml(&xml); Ok(Document { root: tc.structure, diff --git a/src/model/listing/cursor.rs b/src/model/listing/cursor.rs index e9d3a3a..a5328d2 100644 --- a/src/model/listing/cursor.rs +++ b/src/model/listing/cursor.rs @@ -4,10 +4,10 @@ use crate::model::addr; use crate::model::document; use crate::model::document::structure; use crate::model::listing::line; +use crate::model::listing::stream; use crate::model::listing::token; use crate::model::listing::token::TokenKind; use crate::model::versioned::Versioned; -use crate::logic::tokenizer; use enum_dispatch::enum_dispatch; use tracing::instrument; @@ -81,10 +81,10 @@ pub enum HorizontalPosition { #[derive(Debug)] pub struct Cursor { - tokenizer: tokenizer::Tokenizer, + position: stream::Position, line: line::Line, - line_begin: tokenizer::Tokenizer, - line_end: tokenizer::Tokenizer, + line_begin: stream::Position, + line_end: stream::Position, desired_horizontal_position: Option, pub class: CursorClass, document: sync::Arc, @@ -100,27 +100,27 @@ enum UpdateMode { } impl CursorClass { - fn place_forward(tokenizer: &mut tokenizer::Tokenizer, offset: addr::Address, hint: &PlacementHint) -> Result { + fn place_forward(position: &mut stream::Position, offset: addr::Address, hint: &PlacementHint) -> Result { loop { - match tokenizer.gen_token() { - tokenizer::TokenGenerationResult::Ok(token) => match CursorClass::new_placement(token, offset, hint) { + match position.gen_token() { + stream::TokenGenerationResult::Ok(token) => match CursorClass::new_placement(token, offset, hint) { Ok(cursor) => return Ok(cursor), /* failed to place on this token; try the next */ Err(_) => {}, }, - tokenizer::TokenGenerationResult::Skip => {}, - tokenizer::TokenGenerationResult::Boundary => return Err(PlacementFailure::HitBottomOfAddressSpace) + stream::TokenGenerationResult::Skip => {}, + stream::TokenGenerationResult::Boundary => return Err(PlacementFailure::HitBottomOfAddressSpace) }; - if !tokenizer.move_next() { + if !position.move_next() { return Err(PlacementFailure::HitBottomOfAddressSpace) } } } - fn place_backward(tokenizer: &mut tokenizer::Tokenizer, offset: addr::Address, hint: &PlacementHint) -> Result { + fn place_backward(position: &mut stream::Position, offset: addr::Address, hint: &PlacementHint) -> Result { loop { - match tokenizer.prev() { + match position.prev() { Some(token) => match CursorClass::new_placement(token, offset, hint) { Ok(cursor) => return 
Ok(cursor), /* failed to place on this token; try the previous */ @@ -135,26 +135,26 @@ impl CursorClass { impl Cursor { pub fn new(document: sync::Arc) -> Cursor { let root = document.root.clone(); - Self::place_tokenizer(document, tokenizer::Tokenizer::at_beginning(root), addr::unit::NULL, &PlacementHint::default()) + Self::place_position(document, stream::Position::at_beginning(root), addr::unit::NULL, &PlacementHint::default()) } pub fn place(document: sync::Arc, path: &structure::Path, offset: addr::Address, hint: PlacementHint) -> Cursor { let root = document.root.clone(); - Self::place_tokenizer(document, tokenizer::Tokenizer::at_path(root, path, offset), offset, &hint) + Self::place_position(document, stream::Position::at_path(root, path, offset), offset, &hint) } - fn place_tokenizer(document: sync::Arc, origin: tokenizer::Tokenizer, offset: addr::Address, hint: &PlacementHint) -> Self { - let mut tokenizer = origin.clone(); + fn place_position(document: sync::Arc, origin: stream::Position, offset: addr::Address, hint: &PlacementHint) -> Self { + let mut position = origin.clone(); - let class = match CursorClass::place_forward(&mut tokenizer, offset, hint) { + let class = match CursorClass::place_forward(&mut position, offset, hint) { Ok(cc) => cc, Err(PlacementFailure::HitBottomOfAddressSpace) => { - tokenizer = origin.clone(); - match CursorClass::place_backward(&mut tokenizer, offset, hint) { + position = origin.clone(); + match CursorClass::place_backward(&mut position, offset, hint) { Ok(cc) => cc, Err(PlacementFailure::HitTopOfAddressSpace) => match hint { PlacementHint::LastDitch => panic!("expected to be able to place cursor somewhere"), - _ => return Self::place_tokenizer(document, origin, offset, &PlacementHint::LastDitch), + _ => return Self::place_position(document, origin, offset, &PlacementHint::LastDitch), }, Err(_) => panic!("unexpected error from CursorClass::place_backward") } @@ -162,13 +162,13 @@ impl Cursor { Err(_) => panic!("unexpected error from CursorClass::place_forward") }; - let mut line_end = tokenizer.clone(); - let (line, mut line_begin, _) = line::Line::containing_tokenizer(&mut line_end); + let mut line_end = position.clone(); + let (line, mut line_begin, _) = line::Line::containing_position(&mut line_end); line_begin.canonicalize_next(); line_end.canonicalize_next(); Cursor { - tokenizer, + position, line, line_begin, line_end, @@ -182,10 +182,10 @@ impl Cursor { #[instrument] fn update_internal(&mut self, document: &sync::Arc, update_mode: UpdateMode) { /* if we're using an outdated structure hierarchy root, make a - * new tokenizer and try to put the cursor nearby in the new + * new position and try to put the cursor nearby in the new * hierarchy. 
*/ if self.document.is_outdated(document) { - let mut options = tokenizer::PortOptionsBuilder::new(); + let mut options = stream::PortOptionsBuilder::new(); options = options.additional_offset(self.class.get_offset()); match update_mode { @@ -196,17 +196,17 @@ impl Cursor { }; let mut options = options.build(); - let mut tokenizer = self.tokenizer.clone(); + let mut position = self.position.clone(); document.changes_since(&self.document, &mut |document, change| { - tokenizer.port_change( + position.port_change( &document.root, change, &mut options); }); - let offset = tokenizer.structure_position_offset() + options.additional_offset.unwrap_or(addr::unit::ZERO); - *self = Self::place_tokenizer(document.clone(), tokenizer, offset, &self.class.get_placement_hint()); + let offset = position.structure_position_offset() + options.additional_offset.unwrap_or(addr::unit::ZERO); + *self = Self::place_position(document.clone(), position, offset, &self.class.get_placement_hint()); } self.class.update(document); @@ -226,18 +226,18 @@ impl Cursor { /// Finds the next token that the supplied function returns Ok(_) for, and mutates our state to account for being on a new token. If we hit the end of the token stream without seeing Ok(_), the state is not mutated. fn next_token_matching Result>(&mut self, acceptor: F) -> Option { - let mut tokenizer = self.tokenizer.clone(); + let mut position = self.position.clone(); let mut line = self.line.clone(); let mut line_begin = self.line_begin.clone(); let mut line_end = self.line_end.clone(); let obj = loop { - match tokenizer.next_preincrement() { + match position.next_preincrement() { None => return None, Some(token) => { - while tokenizer >= line_end { + while position >= line_end { line_begin = line_end.clone(); - line = line::Line::next_from_tokenizer(&mut line_end); + line = line::Line::next_from_position(&mut line_end); line_end.canonicalize_next(); } @@ -257,25 +257,25 @@ impl Cursor { self.line = line; self.line_begin = line_begin; self.line_end = line_end; - self.tokenizer = tokenizer; + self.position = position; Some(obj) } /// See [next_token_matching]. 
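The `next_token_matching` helper above, and the `prev_token_matching` mirror that follows, share one discipline: clone the position, scan speculatively, and only commit the clone back into `self` once the acceptor returns `Ok`. A self-contained sketch of that commit-on-success shape, with invented `SimplePos`/`SimpleCursor` types standing in for the crate's `stream::Position` and `Cursor` (line bookkeeping omitted):

```rust
// Hypothetical stand-ins; not types from this crate.
#[derive(Clone, Debug)]
struct SimplePos {
    index: usize,
}

struct SimpleCursor {
    pos: SimplePos,
    tokens: Vec<&'static str>,
}

impl SimpleCursor {
    /// Advances to the next token accepted by `acceptor`, mutating state only
    /// on success; on failure the cursor is left exactly where it was.
    fn next_token_matching<O>(
        &mut self,
        acceptor: impl Fn(&str) -> Result<O, ()>,
    ) -> Option<O> {
        // Work on a clone so a failed scan cannot corrupt the live state.
        let mut pos = self.pos.clone();
        loop {
            pos.index += 1;
            match self.tokens.get(pos.index) {
                None => return None, // hit the end; self.pos is untouched
                Some(&tok) => match acceptor(tok) {
                    Ok(obj) => {
                        self.pos = pos; // commit the speculative position
                        return Some(obj);
                    }
                    Err(()) => continue, // skip this token, try the next
                },
            }
        }
    }
}

fn main() {
    let mut c = SimpleCursor { pos: SimplePos { index: 0 }, tokens: vec!["a", "b2", "c"] };
    let hit = c.next_token_matching(|t| if t.len() == 2 { Ok(t.to_owned()) } else { Err(()) });
    assert_eq!(hit.as_deref(), Some("b2"));
    assert_eq!(c.pos.index, 1);
}
```

The payoff is that a failed search leaves the cursor, and in the real code its line bookkeeping too, exactly where it was.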
fn prev_token_matching Result>(&mut self, acceptor: F) -> Option { - let mut tokenizer = self.tokenizer.clone(); + let mut position = self.position.clone(); let mut line = self.line.clone(); let mut line_begin = self.line_begin.clone(); let mut line_end = self.line_end.clone(); let obj = loop { - match tokenizer.prev() { + match position.prev() { None => return None, Some(token) => { - while tokenizer < line_begin { + while position < line_begin { line_end = line_begin.clone(); - line = line::Line::prev_from_tokenizer(&mut line_begin); + line = line::Line::prev_from_position(&mut line_begin); line_begin.canonicalize_next(); } @@ -295,7 +295,7 @@ impl Cursor { self.line = line; self.line_begin = line_begin; self.line_end = line_end; - self.tokenizer = tokenizer; + self.position = position; Some(obj) } @@ -327,13 +327,13 @@ impl Cursor { pub fn move_left_large(&mut self) -> MovementResult { self.movement(|c| c.move_left_large(), TransitionHint::MoveLeftLarge) } pub fn move_right_large(&mut self) -> MovementResult { self.movement(|c| c.move_right_large(), TransitionHint::UnspecifiedRight) } - fn move_vertically(&mut self, line: line::Line, line_begin: tokenizer::Tokenizer, line_end: tokenizer::Tokenizer) -> Result { + fn move_vertically(&mut self, line: line::Line, line_begin: stream::Position, line_end: stream::Position) -> Result { let dhp = match &self.desired_horizontal_position { Some(x) => x.clone(), None => self.class.get_horizontal_position_in_line(&self.line), }; - let mut tokenizer = line_begin.clone(); + let mut position = line_begin.clone(); let hint = TransitionHint::MoveVertical { horizontal_position: &dhp, @@ -342,23 +342,23 @@ impl Cursor { }; self.class = loop { - match tokenizer.gen_token() { - tokenizer::TokenGenerationResult::Ok(token) => match CursorClass::new_transition(token, &hint) { + match position.gen_token() { + stream::TokenGenerationResult::Ok(token) => match CursorClass::new_transition(token, &hint) { Ok(cc) => break cc, Err(_tok) => {}, }, - tokenizer::TokenGenerationResult::Skip => {}, - tokenizer::TokenGenerationResult::Boundary => return Ok(MovementResult::HitEnd), + stream::TokenGenerationResult::Skip => {}, + stream::TokenGenerationResult::Boundary => return Ok(MovementResult::HitEnd), }; - if !tokenizer.move_next() { + if !position.move_next() { return Ok(MovementResult::HitEnd); } - if tokenizer >= line_end { + if position >= line_end { break loop { - if let Some(token) = tokenizer.prev() { - if tokenizer < line_begin { + if let Some(token) = position.prev() { + if position < line_begin { return Err((line_begin, line_end)); } @@ -375,7 +375,7 @@ impl Cursor { assert!(line.iter_tokens().any(|t| t == self.class.get_token())); - self.tokenizer = tokenizer; + self.position = position; self.line = line; self.line_begin = line_begin; self.line_end = line_end; @@ -390,7 +390,7 @@ impl Cursor { loop { let line_end = line_begin.clone(); - let line = line::Line::prev_from_tokenizer(&mut line_begin); + let line = line::Line::prev_from_position(&mut line_begin); if line.is_empty() { return MovementResult::HitStart; } @@ -407,7 +407,7 @@ impl Cursor { loop { let line_begin = line_end.clone(); - let line = line::Line::next_from_tokenizer(&mut line_end); + let line = line::Line::next_from_position(&mut line_end); if line.is_empty() { return MovementResult::HitEnd; } @@ -440,15 +440,15 @@ impl Cursor { */ pub fn structure_path(&self) -> structure::Path { - self.tokenizer.structure_path() + self.position.structure_path() } pub fn structure_child_index(&self) -> usize { - 
self.tokenizer.structure_position_child() + self.position.structure_position_child() } pub fn structure_offset(&self) -> addr::Address { - self.tokenizer.structure_position_offset() + self.class.get_offset() + self.position.structure_position_offset() + self.class.get_offset() } pub fn document(&self) -> sync::Arc { @@ -498,16 +498,16 @@ impl CursorClass { } #[instrument] - fn try_move_prev_token(&self, mut tokenizer: tokenizer::Tokenizer, hint: TransitionHint) -> Option<(CursorClass, tokenizer::Tokenizer)> { + fn try_move_prev_token(&self, mut position: stream::Position, hint: TransitionHint) -> Option<(CursorClass, stream::Position)> { loop { - match tokenizer.prev() { + match position.prev() { None => { return None }, Some(token) => { match Self::new_transition(token, &hint) { Ok(cc) => { - return Some((cc, tokenizer)); + return Some((cc, position)); }, Err(_token) => { /* skip this token and try the one before it */ @@ -519,16 +519,16 @@ impl CursorClass { } #[instrument] - fn try_move_next_token(&self, mut tokenizer: tokenizer::Tokenizer, hint: TransitionHint) -> Option<(CursorClass, tokenizer::Tokenizer)> { + fn try_move_next_token(&self, mut position: stream::Position, hint: TransitionHint) -> Option<(CursorClass, stream::Position)> { loop { - match tokenizer.next_preincrement() { + match position.next_preincrement() { None => { return None }, Some(token) => { match Self::new_transition(token, &hint) { Ok(cc) => { - return Some((cc, tokenizer)); + return Some((cc, position)); }, Err(_) => { /* skip this token and try the one after it */ @@ -567,7 +567,7 @@ pub enum TransitionHint<'a> { MoveVertical { horizontal_position: &'a HorizontalPosition, line: &'a line::Line, - line_end: &'a tokenizer::Tokenizer, + line_end: &'a stream::Position, }, EndOfLine, } @@ -928,15 +928,15 @@ mod tests { let mut cursor = Cursor::place(document, &vec![], 0x0.into(), PlacementHint::Unused); println!("initial:"); println!(" line: {:?}", cursor.line); - println!(" line tokenizers: {:?}-{:?}", cursor.line_begin, cursor.line_end); + println!(" line positions: {:?}-{:?}", cursor.line_begin, cursor.line_end); cursor.move_up(); println!("after move up 1:"); println!(" line: {:?}", cursor.line); - println!(" line tokenizers: {:?}-{:?}", cursor.line_begin, cursor.line_end); + println!(" line positions: {:?}-{:?}", cursor.line_begin, cursor.line_end); cursor.move_up(); println!("after move up 2:"); println!(" line: {:?}", cursor.line); - println!(" line tokenizers: {:?}-{:?}", cursor.line_begin, cursor.line_end); + println!(" line positions: {:?}-{:?}", cursor.line_begin, cursor.line_end); cursor.move_right(); } diff --git a/src/model/listing/line.rs b/src/model/listing/line.rs index 0a5d0a5..7c43f08 100644 --- a/src/model/listing/line.rs +++ b/src/model/listing/line.rs @@ -8,9 +8,9 @@ use std::sync; use crate::model::addr; use crate::model::document::structure; +use crate::model::listing::stream; use crate::model::listing::token; use crate::model::listing::token::TokenKind; -use crate::logic::tokenizer; use crate::util; #[derive(Clone)] @@ -56,19 +56,19 @@ impl Line { } } - /// Figures out what line contains the token immediately after the tokenizer's position. Moves the referenced tokenizer to the end of that line, and returns the line, a tokenizer pointing to the beginning of the line, and the index of the specified token within that line. - pub fn containing_tokenizer(tokenizer: &mut Tokenizer) -> (Self, Tokenizer, usize) { + /// Figures out what line contains the token immediately after the position. 
Moves the referenced position to the end of that line, and returns the line, a position pointing to the beginning of the line, and the index of the specified token within that line. + pub fn containing_position(position: &mut Position) -> (Self, Position, usize) { /* Put the first token on the line. */ let mut line = Line::from_token(loop { - match tokenizer.gen_token() { - tokenizer::TokenGenerationResult::Ok(token) => break token, + match position.gen_token() { + stream::TokenGenerationResult::Ok(token) => break token, /* If we hit the end, just return an empty line. */ - tokenizer::TokenGenerationResult::Skip => if tokenizer.move_next() { continue } else { return (Self::empty(), tokenizer.clone(), 0) }, - tokenizer::TokenGenerationResult::Boundary => return (Self::empty(), tokenizer.clone(), 0) + stream::TokenGenerationResult::Skip => if position.move_next() { continue } else { return (Self::empty(), position.clone(), 0) }, + stream::TokenGenerationResult::Boundary => return (Self::empty(), position.clone(), 0) } - }, tokenizer.in_summary()); + }, position.in_summary()); - let mut prev = tokenizer.clone(); + let mut prev = position.clone(); let mut index = 0; /* Walk `prev` back to the beginning of the line. */ @@ -78,9 +78,9 @@ impl Line { } if match line.push_front(match prev.gen_token() { - tokenizer::TokenGenerationResult::Ok(token) => token, - tokenizer::TokenGenerationResult::Skip => continue, - tokenizer::TokenGenerationResult::Boundary => break, + stream::TokenGenerationResult::Ok(token) => token, + stream::TokenGenerationResult::Skip => continue, + stream::TokenGenerationResult::Boundary => break, }) { LinePushResult::Accepted => { index+= 1; continue }, LinePushResult::Completed => { index+= 1; break }, @@ -93,40 +93,40 @@ impl Line { } } - /* Walk `tokenizer` to the end of the line. */ - tokenizer.move_next(); + /* Walk `position` to the end of the line. */ + position.move_next(); loop { - match line.push_back(match tokenizer.gen_token() { - tokenizer::TokenGenerationResult::Ok(token) => token, - tokenizer::TokenGenerationResult::Skip => if tokenizer.move_next() { continue } else { break }, - tokenizer::TokenGenerationResult::Boundary => break, + match line.push_back(match position.gen_token() { + stream::TokenGenerationResult::Ok(token) => token, + stream::TokenGenerationResult::Skip => if position.move_next() { continue } else { break }, + stream::TokenGenerationResult::Boundary => break, }) { - LinePushResult::Accepted => tokenizer.move_next(), - LinePushResult::Completed => { tokenizer.move_next(); break }, + LinePushResult::Accepted => position.move_next(), + LinePushResult::Completed => { position.move_next(); break }, LinePushResult::Rejected => break, - LinePushResult::BadPosition => tokenizer.move_next(), + LinePushResult::BadPosition => position.move_next(), }; } // TODO: move this to tests? - assert_eq!(line, Self::next_from_tokenizer(&mut prev.clone())); + assert_eq!(line, Self::next_from_position(&mut prev.clone())); (line, prev, index) } - /// Returns the line ending at the tokenizer's current position, and moves the tokenizer to the beginning of that line. - pub fn prev_from_tokenizer(tokenizer: &mut impl tokenizer::AbstractTokenizer) -> Self { + /// Returns the line ending at the position, and moves the position to the beginning of that line. 
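`containing_position` above, and the `prev_from_position`/`next_from_position` pair that follows, all drive `Line` through the same push-result protocol: feed tokens until the line reports it is complete or rejects one. A toy sketch of that protocol, assuming an invented `WordLine` with a byte budget; only the result enum's shape mirrors the crate's `LinePushResult`, minus its `BadPosition` variant:

```rust
#[derive(Debug, PartialEq)]
enum PushResult {
    Accepted,  // token added; keep feeding the line
    Completed, // token added and the line is now full
    Rejected,  // token belongs to the next line; stop
}

struct WordLine {
    words: Vec<String>,
    budget: usize, // invented stand-in for the line's capacity rules
}

impl WordLine {
    fn push_back(&mut self, word: &str) -> PushResult {
        if word.len() > self.budget {
            return PushResult::Rejected;
        }
        self.budget -= word.len();
        self.words.push(word.to_owned());
        if self.budget == 0 { PushResult::Completed } else { PushResult::Accepted }
    }
}

fn main() {
    let mut line = WordLine { words: Vec::new(), budget: 8 };
    let mut it = ["then", "fill", "next"].into_iter();
    // Same loop shape as next_from_position: push until the line completes or
    // rejects a token, leaving the iterator parked at the start of the next line.
    while let Some(w) = it.next() {
        match line.push_back(w) {
            PushResult::Accepted => continue,
            PushResult::Completed | PushResult::Rejected => break,
        }
    }
    assert_eq!(line.words, vec!["then", "fill"]);
}
```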
+ pub fn prev_from_position(position: &mut impl stream::AbstractPosition) -> Self { let mut line = Self::empty(); loop { - if !tokenizer.move_prev() { + if !position.move_prev() { break; } - if match line.push_front(match tokenizer.gen_token() { - tokenizer::TokenGenerationResult::Ok(token) => token, - tokenizer::TokenGenerationResult::Skip => continue, - tokenizer::TokenGenerationResult::Boundary => break, + if match line.push_front(match position.gen_token() { + stream::TokenGenerationResult::Ok(token) => token, + stream::TokenGenerationResult::Skip => continue, + stream::TokenGenerationResult::Boundary => break, }) { LinePushResult::Accepted => continue, LinePushResult::Completed => break, @@ -134,7 +134,7 @@ impl Line { LinePushResult::BadPosition => false, } { /* roll the state back */ - assert!(tokenizer.move_next()); + assert!(position.move_next()); break; } } @@ -142,20 +142,20 @@ impl Line { line } - /// Returns the line beginning at the tokenizer's current position, and moves the tokenizer to the end of that line. - pub fn next_from_tokenizer(tokenizer: &mut impl tokenizer::AbstractTokenizer) -> Self { + /// Returns the line beginning at the position, and moves the position to the end of that line. + pub fn next_from_position(position: &mut impl stream::AbstractPosition) -> Self { let mut line = Line::empty(); loop { - match line.push_back(match tokenizer.gen_token() { - tokenizer::TokenGenerationResult::Ok(token) => token, - tokenizer::TokenGenerationResult::Skip => if tokenizer.move_next() { continue } else { break }, - tokenizer::TokenGenerationResult::Boundary => break, + match line.push_back(match position.gen_token() { + stream::TokenGenerationResult::Ok(token) => token, + stream::TokenGenerationResult::Skip => if position.move_next() { continue } else { break }, + stream::TokenGenerationResult::Boundary => break, }) { - LinePushResult::Accepted => tokenizer.move_next(), - LinePushResult::Completed => { tokenizer.move_next(); break }, + LinePushResult::Accepted => position.move_next(), + LinePushResult::Completed => { position.move_next(); break }, LinePushResult::Rejected => break, - LinePushResult::BadPosition => tokenizer.move_next(), + LinePushResult::BadPosition => position.move_next(), }; } diff --git a/src/model/listing/mod.rs b/src/model/listing/mod.rs index e5ad3b9..b5c8f39 100644 --- a/src/model/listing/mod.rs +++ b/src/model/listing/mod.rs @@ -1,4 +1,5 @@ pub mod cursor; pub mod line; +pub mod stream; pub mod token; pub mod window; diff --git a/src/logic/tokenizer.rs b/src/model/listing/stream.rs similarity index 73% rename from src/logic/tokenizer.rs rename to src/model/listing/stream.rs index ec113f5..a7fd6fd 100644 --- a/src/logic/tokenizer.rs +++ b/src/model/listing/stream.rs @@ -1,13 +1,6 @@ //! This module includes the logic that converts from a document structure //! hierarchy into a seekable stream of tokens. -// TODO: rework the concept of a tokenizer into a TokenCursor or -// something like it. also, to reconcile the two different -// interpretations of movement (re: turning around directions, whether -// a position is on an token or on a border), we should expose two -// wrapper unit types that you have to do all movement through to -// specify which type of movement you want. 
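For orientation, the module doc's claim ("converts from a document structure hierarchy into a seekable stream of tokens") boils down to a depth-first walk that keeps an explicit ancestor stack, which is what the `Position`/`StackEntry` pair below implements. A minimal pre-order sketch with invented `Node`/`Frame`/`Pos` types, emitting only child names where the real code emits structured tokens:

```rust
use std::sync::Arc;

struct Node {
    name: &'static str,
    children: Vec<Arc<Node>>,
}

struct Frame {
    node: Arc<Node>,
    next_child: usize,
}

struct Pos {
    stack: Vec<Frame>, // path back to the root, like the StackEntry chain
}

impl Pos {
    fn at_beginning(root: Arc<Node>) -> Pos {
        Pos { stack: vec![Frame { node: root, next_child: 0 }] }
    }

    /// Emits the next "title" token in pre-order, or None at the end.
    fn next_token(&mut self) -> Option<&'static str> {
        loop {
            let top = self.stack.last_mut()?;
            if top.next_child < top.node.children.len() {
                // Descend: push a frame for the next unvisited child.
                let child = top.node.children[top.next_child].clone();
                top.next_child += 1;
                let name = child.name;
                self.stack.push(Frame { node: child, next_child: 0 });
                return Some(name);
            }
            // Ascend: this node is exhausted.
            self.stack.pop();
        }
    }
}

fn main() {
    let leaf = |name| Arc::new(Node { name, children: vec![] });
    let root = Arc::new(Node {
        name: "root",
        children: vec![leaf("a"), Arc::new(Node { name: "b", children: vec![leaf("c")] })],
    });
    let mut pos = Pos::at_beginning(root);
    let mut out = vec![];
    while let Some(t) = pos.next_token() { out.push(t); }
    assert_eq!(out, ["a", "b", "c"]);
}
```

The real `Position` is richer in two ways this sketch elides: the state machine within each node (blanks, titles, content, summaries) and cheap cloning via `Arc`-linked stack entries.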
- use std::sync; use crate::model::addr; @@ -20,7 +13,7 @@ use crate::model::listing::token; use tracing::instrument; #[derive(Clone, Debug, PartialEq, Eq)] -enum TokenizerState { +enum PositionState { PreBlank, Title, @@ -52,28 +45,28 @@ enum TokenizerState { } #[derive(Clone, PartialEq, Eq, Debug)] -enum TokenizerDescent { +enum Descent { Child(usize), ChildSummary(usize), MySummary, } #[derive(Clone)] -pub struct TokenizerStackEntry { - stack: Option>, - descent: TokenizerDescent, +pub struct StackEntry { + stack: Option>, + descent: Descent, /// How many nodes deep in the hierarchy generated tokens should appear to be (plus or minus one depending on token type) apparent_depth: usize, - /// How long the [stack] chain actually is. Used when comparing Tokenizers. + /// How long the [stack] chain actually is. Used when comparing Positions. logical_depth: usize, node: sync::Arc, node_addr: addr::Address, } /* This lets us provide an alternate, simpler implementation to - * certain unit tests to help isolate bugs to either Tokenizer logic + * certain unit tests to help isolate bugs to either Position logic * or Window/Line logic. */ -pub trait AbstractTokenizer: Clone { +pub trait AbstractPosition: Clone { fn at_beginning(root: sync::Arc) -> Self; fn at_path(root: sync::Arc, path: &structure::Path, offset: addr::Address) -> Self; fn port_change(&mut self, new_doc: &sync::Arc, change: &document::change::Change); @@ -88,15 +81,15 @@ pub trait AbstractTokenizer: Clone { } #[derive(Clone)] -pub struct Tokenizer { +pub struct Position { /* invariants: - stack should always contain a path all the way back to the root node */ - stack: Option>, - state: TokenizerState, + stack: Option>, + state: PositionState, /// How many nodes deep in the hierarchy generated tokens should appear to be (plus or minus one depending on token type) apparent_depth: usize, - /// How long the [stack] chain actually is. Used when comparing Tokenizers. + /// How long the [stack] chain actually is. Used when comparing Positions. 
logical_depth: usize, pub node: sync::Arc, node_addr: addr::Address, @@ -113,20 +106,20 @@ enum AscendDirection { Prev, Next } -struct TokenizerStackDebugHelper<'a>(&'a Option>); +struct StackDebugHelper<'a>(&'a Option>); -impl std::fmt::Debug for Tokenizer { +impl std::fmt::Debug for Position { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - f.debug_struct("Tokenizer") + f.debug_struct("Position") .field("state", &self.state) .field("node", &self.node.props.name) .field("node_addr", &self.node_addr) - .field("stack", &TokenizerStackDebugHelper(&self.stack)) + .field("stack", &StackDebugHelper(&self.stack)) .finish_non_exhaustive() } } -impl<'a> std::fmt::Debug for TokenizerStackDebugHelper<'a> { +impl<'a> std::fmt::Debug for StackDebugHelper<'a> { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { let mut dl = f.debug_list(); let mut i = self.0; @@ -140,7 +133,7 @@ impl<'a> std::fmt::Debug for TokenizerStackDebugHelper<'a> { } } -impl std::fmt::Debug for TokenizerStackEntry { +impl std::fmt::Debug for StackEntry { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { f.debug_struct("Entry") .field("descent", &self.descent) @@ -187,7 +180,7 @@ enum PortStackMode { struct PortStackState { mode: PortStackMode, current_path: structure::Path, - new_stack: Option>, + new_stack: Option>, apparent_depth: usize, logical_depth: usize, node_addr: addr::Address, @@ -199,7 +192,7 @@ impl std::fmt::Debug for PortStackState { f.debug_struct("PortStackState") .field("mode", &self.mode) .field("current_path", &self.current_path) - .field("new_stack", &TokenizerStackDebugHelper(&self.new_stack)) + .field("new_stack", &StackDebugHelper(&self.new_stack)) .field("apparent_depth", &self.apparent_depth) .field("logical_depth", &self.logical_depth) .field("node_addr", &self.node_addr) @@ -210,7 +203,7 @@ impl std::fmt::Debug for PortStackState { #[derive(Debug)] enum IntermediatePortState { - Finished(TokenizerState), + Finished(PositionState), NormalContent(Option, usize), SummaryLabel(usize), @@ -245,12 +238,12 @@ impl Default for PortOptionsBuilder { } } -impl Tokenizer { - /// Creates a new tokenizer seeked to the root of the structure hierarchy and the beginning of the token stream. - pub fn at_beginning(root: sync::Arc) -> Tokenizer { - Tokenizer { +impl Position { + /// Creates a new position seeked to the root of the structure hierarchy and the beginning of the token stream. + pub fn at_beginning(root: sync::Arc) -> Position { + Position { stack: None, - state: TokenizerState::PreBlank, + state: PositionState::PreBlank, apparent_depth: 0, logical_depth: 0, node: root, @@ -258,8 +251,8 @@ impl Tokenizer { } } - /// Creates a new tokenizer positioned at a specific offset within the node at the given path. - pub fn at_path(root: sync::Arc, path: &structure::Path, offset: addr::Address) -> Tokenizer { + /// Creates a new position positioned at a specific offset within the node at the given path. 
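The `at_path` constructor below walks a `structure::Path` (a list of child indices) down from the root, pushing a `StackEntry` per descent and accumulating the target node's absolute address. A toy analogue of just the address arithmetic, with an invented `Node` type and plain `u64` addresses; the real code also tracks summary state and depth:

```rust
struct Node {
    offset: u64, // offset within the parent
    children: Vec<Node>,
}

/// Returns (node, absolute address), or None if the path is invalid.
fn seek<'a>(root: &'a Node, path: &[usize]) -> Option<(&'a Node, u64)> {
    let mut node = root;
    let mut addr = 0u64;
    for &child_index in path {
        node = node.children.get(child_index)?;
        addr += node.offset; // child offsets are parent-relative
    }
    Some((node, addr))
}

fn main() {
    let root = Node {
        offset: 0,
        children: vec![Node {
            offset: 0x10,
            children: vec![Node { offset: 0x4, children: vec![] }],
        }],
    };
    let (_, addr) = seek(&root, &[0, 0]).unwrap();
    assert_eq!(addr, 0x14); // 0x10 + 0x4
}
```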
+ pub fn at_path(root: sync::Arc, path: &structure::Path, offset: addr::Address) -> Position { let mut node = &root; let mut node_addr = addr::unit::NULL; let mut apparent_depth = 0; @@ -273,9 +266,9 @@ impl Tokenizer { for child_index in path { if !summary_prev && summary_next { - stack = Some(sync::Arc::new(TokenizerStackEntry { + stack = Some(sync::Arc::new(StackEntry { stack: stack.take(), - descent: TokenizerDescent::MySummary, + descent: Descent::MySummary, apparent_depth, logical_depth, node: node.clone(), @@ -287,9 +280,9 @@ impl Tokenizer { summary_prev = summary_next; - stack = Some(sync::Arc::new(TokenizerStackEntry { + stack = Some(sync::Arc::new(StackEntry { stack: stack.take(), - descent: if summary_prev { TokenizerDescent::ChildSummary(*child_index) } else { TokenizerDescent::Child(*child_index) }, + descent: if summary_prev { Descent::ChildSummary(*child_index) } else { Descent::Child(*child_index) }, apparent_depth, logical_depth, node: node.clone(), @@ -308,9 +301,9 @@ impl Tokenizer { }; } - let mut tokenizer = Tokenizer { + let mut position = Position { stack, - state: if summary_prev { TokenizerState::SummaryValueBegin } else { TokenizerState::PreBlank }, + state: if summary_prev { PositionState::SummaryValueBegin } else { PositionState::PreBlank }, apparent_depth, logical_depth, node: node.clone(), @@ -318,13 +311,13 @@ impl Tokenizer { }; if offset > addr::unit::NULL { - tokenizer.seek_in_node_to_offset(offset, summary_next); + position.seek_in_node_to_offset(offset, summary_next); } - tokenizer + position } - /// Applies a single change to the tokenizer state. + /// Applies a single change to the position state. #[instrument] pub fn port_change(&mut self, new_root: &sync::Arc, change: &change::Change, options: &mut PortOptions) { /* Recreate our stack, processing descents and such, leaving off with some information about the node we're actually on now. */ @@ -339,9 +332,9 @@ impl Tokenizer { (PortStackMode::Deleted { node: _, first_deleted_child_index, offset_within_parent: _, .. }, _) if stack_state.children_summarized() => IntermediatePortState::SummaryLabel(*first_deleted_child_index), (PortStackMode::Deleted { node: _, first_deleted_child_index, offset_within_parent, .. }, state) => { let offset_within_child = match state { - TokenizerState::MetaContent(offset, _) => *offset, - TokenizerState::Hexdump { extent, .. } => extent.begin, - TokenizerState::Hexstring(extent, _) => extent.begin, + PositionState::MetaContent(offset, _) => *offset, + PositionState::Hexdump { extent, .. } => extent.begin, + PositionState::Hexstring(extent, _) => extent.begin, _ => addr::unit::NULL }; @@ -350,80 +343,80 @@ impl Tokenizer { /* We were in a child that got destructured. Our old state tells us about where we were in that child. 
*/ (PortStackMode::Destructuring { destructured_childhood, destructured_child_index, summary: false }, state) => match state { - TokenizerState::PreBlank - | TokenizerState::Title - | TokenizerState::SummaryPreamble - | TokenizerState::SummaryOpener - | TokenizerState::SummaryValueBegin => IntermediatePortState::NormalContent(Some(destructured_childhood.offset), *destructured_child_index), + PositionState::PreBlank + | PositionState::Title + | PositionState::SummaryPreamble + | PositionState::SummaryOpener + | PositionState::SummaryValueBegin => IntermediatePortState::NormalContent(Some(destructured_childhood.offset), *destructured_child_index), - TokenizerState::MetaContent(offset, index) => IntermediatePortState::NormalContent(Some(destructured_childhood.offset + offset.to_size()), destructured_child_index + *index), - TokenizerState::Hexdump { extent, index, .. } => IntermediatePortState::NormalContent(Some(destructured_childhood.offset + extent.begin.to_size()), destructured_child_index + *index), - TokenizerState::Hexstring(extent, index) => IntermediatePortState::NormalContent(Some(destructured_childhood.offset + extent.begin.to_size()), destructured_child_index + *index), - TokenizerState::SummaryLeaf => IntermediatePortState::NormalContent(Some(destructured_childhood.offset), *destructured_child_index), + PositionState::MetaContent(offset, index) => IntermediatePortState::NormalContent(Some(destructured_childhood.offset + offset.to_size()), destructured_child_index + *index), + PositionState::Hexdump { extent, index, .. } => IntermediatePortState::NormalContent(Some(destructured_childhood.offset + extent.begin.to_size()), destructured_child_index + *index), + PositionState::Hexstring(extent, index) => IntermediatePortState::NormalContent(Some(destructured_childhood.offset + extent.begin.to_size()), destructured_child_index + *index), + PositionState::SummaryLeaf => IntermediatePortState::NormalContent(Some(destructured_childhood.offset), *destructured_child_index), - TokenizerState::SummaryLabel(i) - | TokenizerState::SummarySeparator(i) => IntermediatePortState::NormalContent(None, destructured_child_index + *i), + PositionState::SummaryLabel(i) + | PositionState::SummarySeparator(i) => IntermediatePortState::NormalContent(None, destructured_child_index + *i), - TokenizerState::SummaryValueEnd - | TokenizerState::SummaryEpilogue - | TokenizerState::SummaryCloser - | TokenizerState::PostBlank - | TokenizerState::End => IntermediatePortState::NormalContent(Some(destructured_childhood.end()), destructured_child_index + 1), + PositionState::SummaryValueEnd + | PositionState::SummaryEpilogue + | PositionState::SummaryCloser + | PositionState::PostBlank + | PositionState::End => IntermediatePortState::NormalContent(Some(destructured_childhood.end()), destructured_child_index + 1), }, // TODO: try harder here (PortStackMode::Destructuring { destructured_child_index, summary: true, .. }, _) => IntermediatePortState::SummaryLabel(*destructured_child_index), /* If a node was switched from ChildrenDisplay::Full to ChildrenDisplay::Summary, we want to stay on the * title or preblank if we were on them, but otherwise we pretend as if we're in PortStackMode::Summary and - * fixup the TokenizerDescent::MySummary later. */ - (PortStackMode::Normal, TokenizerState::PreBlank) => IntermediatePortState::Finished(TokenizerState::PreBlank), - (PortStackMode::Normal, TokenizerState::Title) => IntermediatePortState::Finished(TokenizerState::Title), + * fixup the Descent::MySummary later. 
*/ + (PortStackMode::Normal, PositionState::PreBlank) => IntermediatePortState::Finished(PositionState::PreBlank), + (PortStackMode::Normal, PositionState::Title) => IntermediatePortState::Finished(PositionState::Title), (PortStackMode::Normal, state) if !stack_state.children_summarized() => match state { - TokenizerState::PreBlank => IntermediatePortState::Finished(TokenizerState::PreBlank), - TokenizerState::Title => IntermediatePortState::Finished(TokenizerState::Title), + PositionState::PreBlank => IntermediatePortState::Finished(PositionState::PreBlank), + PositionState::Title => IntermediatePortState::Finished(PositionState::Title), - TokenizerState::MetaContent(offset, index) => IntermediatePortState::NormalContent(Some(*offset), *index), - TokenizerState::Hexdump { extent, index, .. } => IntermediatePortState::NormalContent(Some(extent.begin), *index), - TokenizerState::Hexstring(extent, index) => IntermediatePortState::NormalContent(Some(extent.begin), *index), + PositionState::MetaContent(offset, index) => IntermediatePortState::NormalContent(Some(*offset), *index), + PositionState::Hexdump { extent, index, .. } => IntermediatePortState::NormalContent(Some(extent.begin), *index), + PositionState::Hexstring(extent, index) => IntermediatePortState::NormalContent(Some(extent.begin), *index), - TokenizerState::SummaryPreamble => IntermediatePortState::Finished(TokenizerState::Title), - TokenizerState::SummaryOpener => IntermediatePortState::NormalContent(Some(addr::unit::NULL), 0), - TokenizerState::SummaryLabel(i) => IntermediatePortState::NormalContent(None, *i), - TokenizerState::SummarySeparator(i) => IntermediatePortState::NormalContent(None, *i), - TokenizerState::SummaryCloser => IntermediatePortState::Finished(TokenizerState::End), - TokenizerState::SummaryEpilogue => IntermediatePortState::Finished(TokenizerState::PostBlank), - TokenizerState::SummaryValueBegin => IntermediatePortState::Finished(TokenizerState::Title), - TokenizerState::SummaryLeaf => IntermediatePortState::NormalContent(Some(addr::unit::NULL), 0), - TokenizerState::SummaryValueEnd => IntermediatePortState::Finished(TokenizerState::End), + PositionState::SummaryPreamble => IntermediatePortState::Finished(PositionState::Title), + PositionState::SummaryOpener => IntermediatePortState::NormalContent(Some(addr::unit::NULL), 0), + PositionState::SummaryLabel(i) => IntermediatePortState::NormalContent(None, *i), + PositionState::SummarySeparator(i) => IntermediatePortState::NormalContent(None, *i), + PositionState::SummaryCloser => IntermediatePortState::Finished(PositionState::End), + PositionState::SummaryEpilogue => IntermediatePortState::Finished(PositionState::PostBlank), + PositionState::SummaryValueBegin => IntermediatePortState::Finished(PositionState::Title), + PositionState::SummaryLeaf => IntermediatePortState::NormalContent(Some(addr::unit::NULL), 0), + PositionState::SummaryValueEnd => IntermediatePortState::Finished(PositionState::End), - TokenizerState::PostBlank => IntermediatePortState::Finished(TokenizerState::PostBlank), - TokenizerState::End => IntermediatePortState::Finished(TokenizerState::End), + PositionState::PostBlank => IntermediatePortState::Finished(PositionState::PostBlank), + PositionState::End => IntermediatePortState::Finished(PositionState::End), }, (PortStackMode::Normal | PortStackMode::Summary, state) => match state { // TODO: these are maybe wrong and should be tested - TokenizerState::PreBlank => - IntermediatePortState::Finished(TokenizerState::SummaryValueBegin), - 
TokenizerState::Title => - IntermediatePortState::Finished(TokenizerState::SummaryValueBegin), + PositionState::PreBlank => + IntermediatePortState::Finished(PositionState::SummaryValueBegin), + PositionState::Title => + IntermediatePortState::Finished(PositionState::SummaryValueBegin), - TokenizerState::MetaContent(_, index) => IntermediatePortState::SummaryLabel(*index), - TokenizerState::Hexdump { index, .. } => IntermediatePortState::SummaryLabel(*index), - TokenizerState::Hexstring(_, index) => IntermediatePortState::SummaryLabel(*index), - - TokenizerState::SummaryPreamble => IntermediatePortState::Finished(TokenizerState::SummaryPreamble), - TokenizerState::SummaryOpener => IntermediatePortState::Finished(TokenizerState::SummaryOpener), - TokenizerState::SummaryLabel(i) => IntermediatePortState::SummaryLabel(*i), - TokenizerState::SummarySeparator(i) => IntermediatePortState::SummarySeparator(*i), - TokenizerState::SummaryCloser => IntermediatePortState::Finished(TokenizerState::SummaryCloser), - TokenizerState::SummaryEpilogue => IntermediatePortState::Finished(TokenizerState::SummaryEpilogue), - TokenizerState::SummaryValueBegin => IntermediatePortState::Finished(TokenizerState::SummaryValueBegin), - TokenizerState::SummaryLeaf => IntermediatePortState::Finished(TokenizerState::SummaryLeaf), - TokenizerState::SummaryValueEnd => IntermediatePortState::Finished(TokenizerState::SummaryValueEnd), - - TokenizerState::PostBlank => IntermediatePortState::Finished(TokenizerState::SummaryEpilogue), - TokenizerState::End => IntermediatePortState::Finished(TokenizerState::SummaryCloser), + PositionState::MetaContent(_, index) => IntermediatePortState::SummaryLabel(*index), + PositionState::Hexdump { index, .. } => IntermediatePortState::SummaryLabel(*index), + PositionState::Hexstring(_, index) => IntermediatePortState::SummaryLabel(*index), + + PositionState::SummaryPreamble => IntermediatePortState::Finished(PositionState::SummaryPreamble), + PositionState::SummaryOpener => IntermediatePortState::Finished(PositionState::SummaryOpener), + PositionState::SummaryLabel(i) => IntermediatePortState::SummaryLabel(*i), + PositionState::SummarySeparator(i) => IntermediatePortState::SummarySeparator(*i), + PositionState::SummaryCloser => IntermediatePortState::Finished(PositionState::SummaryCloser), + PositionState::SummaryEpilogue => IntermediatePortState::Finished(PositionState::SummaryEpilogue), + PositionState::SummaryValueBegin => IntermediatePortState::Finished(PositionState::SummaryValueBegin), + PositionState::SummaryLeaf => IntermediatePortState::Finished(PositionState::SummaryLeaf), + PositionState::SummaryValueEnd => IntermediatePortState::Finished(PositionState::SummaryValueEnd), + + PositionState::PostBlank => IntermediatePortState::Finished(PositionState::SummaryEpilogue), + PositionState::End => IntermediatePortState::Finished(PositionState::SummaryCloser), } }; @@ -440,9 +433,9 @@ impl Tokenizer { let is_summary = stack_state.summarized(); - *self = Tokenizer { + *self = Position { stack: stack_state.new_stack, - state: TokenizerState::End, /* this is a placeholder. we finalize the details later... */ + state: PositionState::End, /* this is a placeholder. we finalize the details later... 
*/ apparent_depth: stack_state.apparent_depth, logical_depth: stack_state.logical_depth, node: stack_state.node, @@ -481,7 +474,7 @@ impl Tokenizer { } else if let Some(offset) = offset.as_mut() { if new_childhood.extent().includes(*offset) { /* if new node contains our offset, we need to descend into it. The state here is, once again, a placeholder. */ - self.descend(if is_summary { TokenizerDescent::ChildSummary(*affected_index) } else { TokenizerDescent::Child(*affected_index) }, TokenizerState::End); + self.descend(if is_summary { Descent::ChildSummary(*affected_index) } else { Descent::Child(*affected_index) }, PositionState::End); *index = 0; *offset-= new_childhood.offset.to_size(); @@ -509,7 +502,7 @@ impl Tokenizer { /* descend into the new node. */ let new_nest_offset = new_nest.offset; - self.descend(if is_summary { TokenizerDescent::ChildSummary(range.first) } else { TokenizerDescent::Child(range.first) }, TokenizerState::End); + self.descend(if is_summary { Descent::ChildSummary(range.first) } else { Descent::Child(range.first) }, PositionState::End); // TODO: is there something more helpful we could do here? *index = 0; @@ -558,14 +551,14 @@ impl Tokenizer { } else { None }); - TokenizerState::MetaContent(line_begin, index) + PositionState::MetaContent(line_begin, index) }, - /* Real TokenizerState doesn't support one-past-the-end for SummaryLabel and SummarySeparator, so need to fix if that would be the case. */ - IntermediatePortState::SummaryLabel(index) if index < children.len() => TokenizerState::SummaryLabel(index), - IntermediatePortState::SummarySeparator(index) if index < children.len() => TokenizerState::SummarySeparator(index), - IntermediatePortState::SummaryLabel(_) => TokenizerState::SummaryCloser, - IntermediatePortState::SummarySeparator(_) => TokenizerState::SummaryCloser, + /* Real PositionState doesn't support one-past-the-end for SummaryLabel and SummarySeparator, so need to fix if that would be the case. */ + IntermediatePortState::SummaryLabel(index) if index < children.len() => PositionState::SummaryLabel(index), + IntermediatePortState::SummarySeparator(index) if index < children.len() => PositionState::SummarySeparator(index), + IntermediatePortState::SummaryLabel(_) => PositionState::SummaryCloser, + IntermediatePortState::SummarySeparator(_) => PositionState::SummaryCloser, }; /* Adjust our stream position to actually be on a token. */ @@ -577,9 +570,9 @@ impl Tokenizer { } } - /// Used to recurse to the base of the tokenizer stack so we can start porting from the top down. Returns whether or not to keep going. + /// Used to recurse to the base of the stack so we can start porting from the top down. Returns whether or not to keep going. #[instrument] - fn port_recurse(tok: &TokenizerStackEntry, new_root: &sync::Arc, change: &change::Change) -> PortStackState { + fn port_recurse(tok: &StackEntry, new_root: &sync::Arc, change: &change::Change) -> PortStackState { match &tok.stack { Some(parent) => { let mut state = Self::port_recurse(parent, new_root, change); @@ -595,15 +588,15 @@ impl Tokenizer { } } - /// Applies a change to a single item in the tokenizer stack. If we need to stop descending in the middle of the stack, return the + /// Applies a change to a single item in the stack. 
If we need to stop descending in the middle of the stack, return the #[instrument] - fn port_stack_entry(state: &mut PortStackState, old_tok: &TokenizerStackEntry, change: &change::Change) { + fn port_stack_entry(state: &mut PortStackState, old_tok: &StackEntry, change: &change::Change) { /* This logic more-or-less mirrors change::update_path */ let child_index = match old_tok.descent { - TokenizerDescent::Child(child_index) | TokenizerDescent::ChildSummary(child_index) => child_index, + Descent::Child(child_index) | Descent::ChildSummary(child_index) => child_index, /* This is handled implicitly by PortStackState::push behavior. */ - TokenizerDescent::MySummary => return, + Descent::MySummary => return, }; match &change.ty { @@ -654,12 +647,12 @@ impl Tokenizer { if summary { if self.node.children.len() > 0 { - self.state = TokenizerState::SummaryLabel(index); + self.state = PositionState::SummaryLabel(index); } else { - self.state = TokenizerState::SummaryLeaf; + self.state = PositionState::SummaryLeaf; } } else { - self.state = TokenizerState::MetaContent(self.estimate_line_begin(Some(offset), index), index); + self.state = PositionState::MetaContent(self.estimate_line_begin(Some(offset), index), index); } while match self.gen_token() { @@ -670,11 +663,11 @@ impl Tokenizer { } } - /// Creates a new tokenizer seeked to the end of the token stream. - pub fn at_end(root: &sync::Arc) -> Tokenizer { - Tokenizer { + /// Creates a new position seeked to the end of the token stream. + pub fn at_end(root: &sync::Arc) -> Position { + Position { stack: None, - state: TokenizerState::End, + state: PositionState::End, apparent_depth: 0, logical_depth: 0, node: root.clone(), @@ -692,7 +685,7 @@ impl Tokenizer { }; match self.state { - TokenizerState::PreBlank => if self.node.props.title_display.has_blanks() { + PositionState::PreBlank => if self.node.props.title_display.has_blanks() { TokenGenerationResult::Ok(token::BlankLineToken { common, accepts_cursor: false, @@ -700,28 +693,28 @@ impl Tokenizer { } else { TokenGenerationResult::Skip }, - TokenizerState::Title => TokenGenerationResult::Ok(token::TitleToken { + PositionState::Title => TokenGenerationResult::Ok(token::TitleToken { common, }.into_token()), - TokenizerState::MetaContent(_, _) => TokenGenerationResult::Skip, - TokenizerState::Hexdump { extent, line_extent, index, .. } => TokenGenerationResult::Ok(token::HexdumpToken { + PositionState::MetaContent(_, _) => TokenGenerationResult::Skip, + PositionState::Hexdump { extent, line_extent, index, .. 
} => TokenGenerationResult::Ok(token::HexdumpToken { common: common.adjust_depth(1), index, extent, line: line_extent, }.into_token()), - TokenizerState::Hexstring(extent, _) => TokenGenerationResult::Ok(token::HexstringToken::new_maybe_truncate(common.adjust_depth(1), extent).into_token()), + PositionState::Hexstring(extent, _) => TokenGenerationResult::Ok(token::HexstringToken::new_maybe_truncate(common.adjust_depth(1), extent).into_token()), - TokenizerState::SummaryPreamble => TokenGenerationResult::Ok(token::SummaryPreambleToken { + PositionState::SummaryPreamble => TokenGenerationResult::Ok(token::SummaryPreambleToken { common, }.into_token()), - TokenizerState::SummaryOpener => TokenGenerationResult::Ok(token::SummaryPunctuationToken { + PositionState::SummaryOpener => TokenGenerationResult::Ok(token::SummaryPunctuationToken { common, kind: token::PunctuationKind::OpenBracket, index: 0, /* unused */ }.into_token()), - TokenizerState::SummaryLabel(i) => { + PositionState::SummaryLabel(i) => { let ch = &self.node.children[i]; TokenGenerationResult::Ok(token::SummaryLabelToken { common: token::TokenCommon { @@ -732,7 +725,7 @@ impl Tokenizer { }, }.into_token()) }, - TokenizerState::SummarySeparator(i) => if i+1 < self.node.children.len() { + PositionState::SummarySeparator(i) => if i+1 < self.node.children.len() { TokenGenerationResult::Ok(token::SummaryPunctuationToken { common, kind: token::PunctuationKind::Comma, @@ -741,17 +734,17 @@ impl Tokenizer { } else { TokenGenerationResult::Skip }, - TokenizerState::SummaryCloser => TokenGenerationResult::Ok(token::SummaryPunctuationToken { + PositionState::SummaryCloser => TokenGenerationResult::Ok(token::SummaryPunctuationToken { common, kind: token::PunctuationKind::CloseBracket, index: 0, /* unused */ }.into_token()), - TokenizerState::SummaryEpilogue => TokenGenerationResult::Ok(token::SummaryEpilogueToken { + PositionState::SummaryEpilogue => TokenGenerationResult::Ok(token::SummaryEpilogueToken { common, }.into_token()), - TokenizerState::SummaryValueBegin => TokenGenerationResult::Skip, - TokenizerState::SummaryLeaf => { + PositionState::SummaryValueBegin => TokenGenerationResult::Skip, + PositionState::SummaryLeaf => { let limit = std::cmp::min(16.into(), self.node.size); let extent = addr::Extent::between(addr::unit::NULL, limit.to_addr()); @@ -766,9 +759,9 @@ impl Tokenizer { structure::ContentDisplay::Hexstring => token::HexstringToken::new_maybe_truncate(common, extent).into_token(), }) }, - TokenizerState::SummaryValueEnd => TokenGenerationResult::Skip, + PositionState::SummaryValueEnd => TokenGenerationResult::Skip, - TokenizerState::PostBlank => if self.node.props.title_display.has_blanks() { + PositionState::PostBlank => if self.node.props.title_display.has_blanks() { TokenGenerationResult::Ok(token::BlankLineToken { common: common.adjust_depth(1), accepts_cursor: true, @@ -776,7 +769,7 @@ impl Tokenizer { } else { TokenGenerationResult::Skip }, - TokenizerState::End => TokenGenerationResult::Skip, + PositionState::End => TokenGenerationResult::Skip, } } @@ -839,19 +832,19 @@ impl Tokenizer { /// Returns true when successful, or false if hit the beginning of the token stream. 
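Note the shape of `gen_token` above: token generation is a pure function of `PositionState`, and states that render nothing answer `Skip` so movement code can step past them, which is what `move_prev` below and `move_next` rely on. A minimal illustrative state machine; the states and their mappings here are invented for the sketch, not the crate's actual table:

```rust
#[derive(Clone, Copy, Debug)]
enum State {
    PreBlank { has_blanks: bool },
    Title,
    End,
}

#[derive(Debug, PartialEq)]
enum Gen {
    Ok(&'static str), // a token to render
    Skip,             // state exists for navigation but emits no token
    Boundary,         // ran off the end of the stream
}

fn gen_token(state: State) -> Gen {
    match state {
        State::PreBlank { has_blanks: true } => Gen::Ok("blank line"),
        State::PreBlank { has_blanks: false } => Gen::Skip,
        State::Title => Gen::Ok("title"),
        State::End => Gen::Boundary,
    }
}

fn main() {
    // Movement code loops on Skip: ask the state for a token, and if it emits
    // nothing, advance to the next state and ask again.
    assert_eq!(gen_token(State::PreBlank { has_blanks: false }), Gen::Skip);
    assert_eq!(gen_token(State::Title), Gen::Ok("title"));
    assert_eq!(gen_token(State::End), Gen::Boundary);
}
```

Keeping generation side-effect free is what makes the speculative scan-and-commit loops in cursor.rs safe: probing a state never changes it.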
pub fn move_prev(&mut self) -> bool { match self.state { - TokenizerState::PreBlank => { + PositionState::PreBlank => { self.try_ascend(AscendDirection::Prev) }, - TokenizerState::Title => { - self.state = TokenizerState::PreBlank; + PositionState::Title => { + self.state = PositionState::PreBlank; true }, - TokenizerState::SummaryPreamble => { - self.state = TokenizerState::Title; + PositionState::SummaryPreamble => { + self.state = PositionState::Title; true }, - TokenizerState::MetaContent(offset, index) => { + PositionState::MetaContent(offset, index) => { let prev_child_option = match index { 0 => None, /* Something is seriously wrong if index was farther than one-past-the-end. */ @@ -862,9 +855,9 @@ impl Tokenizer { if let Some((prev_child_index, prev_child)) = prev_child_option { if prev_child.end() >= offset { self.descend( - TokenizerDescent::Child(prev_child_index), + Descent::Child(prev_child_index), /* Descend to thse end of the child. */ - TokenizerState::End); + PositionState::End); return true; } @@ -877,100 +870,100 @@ impl Tokenizer { let interstitial = addr::Extent::between(interstitial.0, interstitial.1); self.state = match self.node.props.content_display { - structure::ContentDisplay::None => TokenizerState::MetaContent(interstitial.begin, index), + structure::ContentDisplay::None => PositionState::MetaContent(interstitial.begin, index), structure::ContentDisplay::Hexdump { line_pitch, gutter_pitch: _ } => { let line_extent = self.get_line_extent(offset - addr::unit::BIT, line_pitch); - TokenizerState::Hexdump { + PositionState::Hexdump { extent: addr::Extent::between(std::cmp::max(interstitial.begin, line_extent.begin), offset), line_extent, index } } - structure::ContentDisplay::Hexstring => TokenizerState::Hexstring(interstitial, index), + structure::ContentDisplay::Hexstring => PositionState::Hexstring(interstitial, index), }; return true; } /* We're pointed at the beginning. Emit the title block. */ - self.state = TokenizerState::Title; + self.state = PositionState::Title; true }, - TokenizerState::Hexstring(extent, index) => { - self.state = TokenizerState::MetaContent(extent.begin, index); + PositionState::Hexstring(extent, index) => { + self.state = PositionState::MetaContent(extent.begin, index); true }, - TokenizerState::Hexdump { extent, index, .. } => { - self.state = TokenizerState::MetaContent(extent.begin, index); + PositionState::Hexdump { extent, index, .. 
} => { + self.state = PositionState::MetaContent(extent.begin, index); true }, - TokenizerState::SummaryOpener => { + PositionState::SummaryOpener => { self.try_ascend(AscendDirection::Prev) }, - TokenizerState::SummaryLabel(i) => { + PositionState::SummaryLabel(i) => { if i == 0 { - self.state = TokenizerState::SummaryOpener; + self.state = PositionState::SummaryOpener; } else { - self.state = TokenizerState::SummarySeparator(i-1); + self.state = PositionState::SummarySeparator(i-1); } true }, - TokenizerState::SummarySeparator(i) => { + PositionState::SummarySeparator(i) => { self.descend( - TokenizerDescent::ChildSummary(i), - TokenizerState::SummaryValueEnd); + Descent::ChildSummary(i), + PositionState::SummaryValueEnd); true }, - TokenizerState::SummaryCloser => { + PositionState::SummaryCloser => { if self.node.children.is_empty() { - self.state = TokenizerState::SummaryOpener; + self.state = PositionState::SummaryOpener; } else { - self.state = TokenizerState::SummarySeparator(self.node.children.len()-1); + self.state = PositionState::SummarySeparator(self.node.children.len()-1); } true }, - TokenizerState::SummaryEpilogue => { + PositionState::SummaryEpilogue => { self.descend( - TokenizerDescent::MySummary, - TokenizerState::SummaryCloser); + Descent::MySummary, + PositionState::SummaryCloser); true }, - TokenizerState::SummaryValueBegin => { + PositionState::SummaryValueBegin => { // should take us to SummaryLabel(i) self.try_ascend(AscendDirection::Prev) }, - TokenizerState::SummaryLeaf => { - self.state = TokenizerState::SummaryValueBegin; + PositionState::SummaryLeaf => { + self.state = PositionState::SummaryValueBegin; true }, - TokenizerState::SummaryValueEnd => { + PositionState::SummaryValueEnd => { if self.node.children.is_empty() { - self.state = TokenizerState::SummaryLeaf; + self.state = PositionState::SummaryLeaf; } else { - self.state = TokenizerState::SummaryCloser; + self.state = PositionState::SummaryCloser; } true }, - TokenizerState::PostBlank => { + PositionState::PostBlank => { match self.node.props.children_display { structure::ChildrenDisplay::None => { - self.state = TokenizerState::MetaContent(self.node.size.to_addr(), self.node.children.len()); + self.state = PositionState::MetaContent(self.node.size.to_addr(), self.node.children.len()); }, structure::ChildrenDisplay::Summary => { - self.state = TokenizerState::SummaryEpilogue; + self.state = PositionState::SummaryEpilogue; }, structure::ChildrenDisplay::Full => { - self.state = TokenizerState::MetaContent(self.node.size.to_addr(), self.node.children.len()); + self.state = PositionState::MetaContent(self.node.size.to_addr(), self.node.children.len()); }, } true }, - TokenizerState::End => { - self.state = TokenizerState::PostBlank; + PositionState::End => { + self.state = PositionState::PostBlank; true }, } @@ -980,41 +973,41 @@ impl Tokenizer { /// Returns true when successful, or false if hit the end of the token stream. 
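Both `move_prev` above and `move_next` below compute the same `Hexdump` geometry: the token's extent is the pitch-aligned line containing the offset, clamped to the interstitial data between children. A sketch of that clamping with plain `u64` addresses standing in for `addr::Address`/`addr::Extent`:

```rust
#[derive(Debug, PartialEq, Clone, Copy)]
struct Extent { begin: u64, end: u64 }

/// The pitch-aligned line containing `offset`.
fn line_extent(offset: u64, pitch: u64) -> Extent {
    let begin = offset - offset % pitch;
    Extent { begin, end: begin + pitch }
}

/// Clamp the line to the available interstitial data, as the Hexdump arms do.
fn hexdump_token_extent(offset: u64, pitch: u64, interstitial: Extent) -> Extent {
    let line = line_extent(offset, pitch);
    Extent {
        begin: line.begin.max(interstitial.begin),
        end: line.end.min(interstitial.end),
    }
}

fn main() {
    // 16-byte lines; data only runs from 0x04 to 0x1c.
    let inter = Extent { begin: 0x04, end: 0x1c };
    assert_eq!(hexdump_token_extent(0x04, 16, inter), Extent { begin: 0x04, end: 0x10 });
    assert_eq!(hexdump_token_extent(0x10, 16, inter), Extent { begin: 0x10, end: 0x1c });
}
```

This is why moving backward clamps against `line_extent.begin` while moving forward clamps against `line_extent.end`: the two directions approach the same line from opposite edges.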
pub fn move_next(&mut self) -> bool { match self.state { - TokenizerState::PreBlank => { - self.state = TokenizerState::Title; + PositionState::PreBlank => { + self.state = PositionState::Title; true }, - TokenizerState::Title => { + PositionState::Title => { match self.node.props.children_display { structure::ChildrenDisplay::None => { - self.state = TokenizerState::MetaContent(addr::unit::NULL, 0); + self.state = PositionState::MetaContent(addr::unit::NULL, 0); }, structure::ChildrenDisplay::Summary => { - self.state = TokenizerState::SummaryPreamble; + self.state = PositionState::SummaryPreamble; }, structure::ChildrenDisplay::Full => { - self.state = TokenizerState::MetaContent(addr::unit::NULL, 0); + self.state = PositionState::MetaContent(addr::unit::NULL, 0); }, } true }, - TokenizerState::SummaryPreamble => { + PositionState::SummaryPreamble => { self.descend( - TokenizerDescent::MySummary, - TokenizerState::SummaryOpener); + Descent::MySummary, + PositionState::SummaryOpener); true }, - TokenizerState::MetaContent(offset, index) => { + PositionState::MetaContent(offset, index) => { let next_child_option = self.node.children.get(index).map(|child| (index, child)); /* Descend, if we can. */ if let Some((next_child_index, next_child)) = next_child_option { if next_child.offset <= offset { self.descend( - TokenizerDescent::Child(next_child_index), + Descent::Child(next_child_index), /* Descend to the beginning of the child. */ - TokenizerState::PreBlank); + PositionState::PreBlank); return true; } @@ -1027,86 +1020,86 @@ impl Tokenizer { let interstitial = addr::Extent::between(interstitial.0, interstitial.1); self.state = match self.node.props.content_display { - structure::ContentDisplay::None => TokenizerState::MetaContent(interstitial.end, index), + structure::ContentDisplay::None => PositionState::MetaContent(interstitial.end, index), structure::ContentDisplay::Hexdump { line_pitch, gutter_pitch: _ } => { let line_extent = self.get_line_extent(offset, line_pitch); - TokenizerState::Hexdump { + PositionState::Hexdump { extent: addr::Extent::between(offset, std::cmp::min(line_extent.end, interstitial.end)), line_extent, index } }, - structure::ContentDisplay::Hexstring => TokenizerState::Hexstring(interstitial, index), + structure::ContentDisplay::Hexstring => PositionState::Hexstring(interstitial, index), }; return true; } /* We were pointed at (or past!) the end. */ - self.state = TokenizerState::PostBlank; + self.state = PositionState::PostBlank; true }, - TokenizerState::Hexstring(extent, index) => { - self.state = TokenizerState::MetaContent(extent.end, index); + PositionState::Hexstring(extent, index) => { + self.state = PositionState::MetaContent(extent.end, index); true }, - TokenizerState::Hexdump { extent, index, .. } => { - self.state = TokenizerState::MetaContent(extent.end, index); + PositionState::Hexdump { extent, index, .. 
} => { + self.state = PositionState::MetaContent(extent.end, index); true }, - TokenizerState::SummaryOpener => { + PositionState::SummaryOpener => { if self.node.children.is_empty() { - self.state = TokenizerState::SummaryCloser; + self.state = PositionState::SummaryCloser; } else { - self.state = TokenizerState::SummaryLabel(0); + self.state = PositionState::SummaryLabel(0); } true }, - TokenizerState::SummaryLabel(i) => { + PositionState::SummaryLabel(i) => { self.descend( - TokenizerDescent::ChildSummary(i), - TokenizerState::SummaryValueBegin); + Descent::ChildSummary(i), + PositionState::SummaryValueBegin); true }, - TokenizerState::SummarySeparator(i) => { + PositionState::SummarySeparator(i) => { if self.node.children.len() == i + 1 { - self.state = TokenizerState::SummaryCloser; + self.state = PositionState::SummaryCloser; } else { - self.state = TokenizerState::SummaryLabel(i+1); + self.state = PositionState::SummaryLabel(i+1); } true }, - TokenizerState::SummaryCloser => { + PositionState::SummaryCloser => { self.try_ascend(AscendDirection::Next) }, - TokenizerState::SummaryEpilogue => { - self.state = TokenizerState::PostBlank; + PositionState::SummaryEpilogue => { + self.state = PositionState::PostBlank; true }, - TokenizerState::SummaryValueBegin => { + PositionState::SummaryValueBegin => { if self.node.children.is_empty() { - self.state = TokenizerState::SummaryLeaf; + self.state = PositionState::SummaryLeaf; } else { - self.state = TokenizerState::SummaryOpener; + self.state = PositionState::SummaryOpener; } true }, - TokenizerState::SummaryLeaf => { - self.state = TokenizerState::SummaryValueEnd; + PositionState::SummaryLeaf => { + self.state = PositionState::SummaryValueEnd; true }, - TokenizerState::SummaryValueEnd => { + PositionState::SummaryValueEnd => { self.try_ascend(AscendDirection::Next) }, - TokenizerState::PostBlank => { - self.state = TokenizerState::End; + PositionState::PostBlank => { + self.state = PositionState::End; true }, - TokenizerState::End => { + PositionState::End => { self.try_ascend(AscendDirection::Next) }, } @@ -1123,7 +1116,7 @@ impl Tokenizer { None } - /// Use this when you're trying to have the tokenizer's position represent an element. + /// Use this when you're trying to have the position represent an element. pub fn next_preincrement(&mut self) -> Option { while { self.move_next() @@ -1137,7 +1130,7 @@ impl Tokenizer { None } - /// Use this when you're trying to have the tokenizer's position represent a border between tokens. + /// Use this when you're trying to have the position represent a boundary between tokens. pub fn next_postincrement(&mut self) -> Option { let mut token; while { @@ -1166,7 +1159,7 @@ impl Tokenizer { } } - /// Pushes an entry onto the tokenizer stack and sets up for traversing + /// Pushes an entry onto the stack and sets up for traversing /// a child node. 
/// /// # Arguments @@ -1176,13 +1169,13 @@ impl Tokenizer { /// fn descend( &mut self, - descent: TokenizerDescent, - state_within: TokenizerState) { + descent: Descent, + state_within: PositionState) { let childhood = descent.childhood(&self.node); let parent_node = std::mem::replace(&mut self.node, childhood.node); let depth_change = descent.depth_change(); - let parent_entry = TokenizerStackEntry { + let parent_entry = StackEntry { stack: self.stack.take(), descent, apparent_depth: self.apparent_depth, @@ -1204,7 +1197,7 @@ impl Tokenizer { match std::mem::replace(&mut self.stack, None) { Some(stack_entry) => { let stack_entry = sync::Arc::unwrap_or_clone(stack_entry); - *self = Tokenizer { + *self = Position { state: match dir { AscendDirection::Prev => stack_entry.descent.before_state(&stack_entry), AscendDirection::Next => stack_entry.descent.after_state(&stack_entry) @@ -1223,14 +1216,14 @@ impl Tokenizer { pub fn hit_bottom(&self) -> bool { match self.state { - TokenizerState::End => self.stack.is_none(), + PositionState::End => self.stack.is_none(), _ => false } } pub fn hit_top(&self) -> bool { match self.state { - TokenizerState::PreBlank => self.stack.is_none(), + PositionState::PreBlank => self.stack.is_none(), _ => false } } @@ -1238,34 +1231,34 @@ impl Tokenizer { pub fn structure_path(&self) -> structure::Path { let mut path = Vec::new(); - TokenizerStackEntry::build_path(&self.stack, &mut path); + StackEntry::build_path(&self.stack, &mut path); path } pub fn structure_position_child(&self) -> usize { match self.state { - TokenizerState::MetaContent(_, ch) => ch, - TokenizerState::Hexdump { index: ch, .. } => ch, - TokenizerState::Hexstring(_, ch) => ch, - TokenizerState::SummaryLabel(ch) => ch, - TokenizerState::SummarySeparator(ch) => ch, - TokenizerState::SummaryCloser => self.node.children.len(), - TokenizerState::SummaryEpilogue => self.node.children.len(), - TokenizerState::PostBlank => self.node.children.len(), - TokenizerState::End => self.node.children.len(), + PositionState::MetaContent(_, ch) => ch, + PositionState::Hexdump { index: ch, .. } => ch, + PositionState::Hexstring(_, ch) => ch, + PositionState::SummaryLabel(ch) => ch, + PositionState::SummarySeparator(ch) => ch, + PositionState::SummaryCloser => self.node.children.len(), + PositionState::SummaryEpilogue => self.node.children.len(), + PositionState::PostBlank => self.node.children.len(), + PositionState::End => self.node.children.len(), _ => 0, } } pub fn structure_position_offset(&self) -> addr::Address { match self.state { - TokenizerState::MetaContent(offset, _) => offset, - TokenizerState::Hexdump { extent, .. } => extent.begin, - TokenizerState::Hexstring(extent, _) => extent.begin, - TokenizerState::SummaryEpilogue => self.node.size.to_addr(), - TokenizerState::PostBlank => self.node.size.to_addr(), - TokenizerState::End => self.node.size.to_addr(), + PositionState::MetaContent(offset, _) => offset, + PositionState::Hexdump { extent, .. } => extent.begin, + PositionState::Hexstring(extent, _) => extent.begin, + PositionState::SummaryEpilogue => self.node.size.to_addr(), + PositionState::PostBlank => self.node.size.to_addr(), + PositionState::End => self.node.size.to_addr(), // TODO: probably some missing here, need to figure out what is intuitive to the user. _ => addr::unit::NULL } @@ -1274,29 +1267,29 @@ impl Tokenizer { /// Returns true if the token that would be returned by gen_token() is part of a summary. 
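///
/// A minimal sketch (illustrative only; `pos` is a hypothetical `Position`):
///
/// ```ignore
/// if pos.in_summary() {
///     /* the token gen_token() would yield renders inside a one-line summary */
/// }
/// ```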
pub fn in_summary(&self) -> bool { match self.state { - TokenizerState::PreBlank => false, - TokenizerState::Title => false, - TokenizerState::MetaContent(_, _) => false, - TokenizerState::Hexdump { .. } => false, - TokenizerState::Hexstring(_, _) => false, - - TokenizerState::SummaryPreamble => true, - TokenizerState::SummaryOpener => true, - TokenizerState::SummaryLabel(_) => true, - TokenizerState::SummarySeparator(_) => true, - TokenizerState::SummaryCloser => true, - TokenizerState::SummaryEpilogue => true, - TokenizerState::SummaryValueBegin => true, - TokenizerState::SummaryLeaf => true, - TokenizerState::SummaryValueEnd => true, - - TokenizerState::PostBlank => false, - TokenizerState::End => false, + PositionState::PreBlank => false, + PositionState::Title => false, + PositionState::MetaContent(_, _) => false, + PositionState::Hexdump { .. } => false, + PositionState::Hexstring(_, _) => false, + + PositionState::SummaryPreamble => true, + PositionState::SummaryOpener => true, + PositionState::SummaryLabel(_) => true, + PositionState::SummarySeparator(_) => true, + PositionState::SummaryCloser => true, + PositionState::SummaryEpilogue => true, + PositionState::SummaryValueBegin => true, + PositionState::SummaryLeaf => true, + PositionState::SummaryValueEnd => true, + + PositionState::PostBlank => false, + PositionState::End => false, } } } -impl PartialEq for Tokenizer { +impl PartialEq for Position { fn eq(&self, other: &Self) -> bool { (match (&self.stack, &other.stack) { (Some(x), Some(y)) => sync::Arc::ptr_eq(x, y), @@ -1309,15 +1302,15 @@ impl PartialEq for Tokenizer { } } -impl Eq for Tokenizer { +impl Eq for Position { } -impl TokenizerDescent { +impl Descent { fn childhood(&self, node: &sync::Arc) -> structure::Childhood { match self { - TokenizerDescent::Child(i) => node.children[*i].clone(), - TokenizerDescent::ChildSummary(i) => node.children[*i].clone(), - TokenizerDescent::MySummary => structure::Childhood { + Descent::Child(i) => node.children[*i].clone(), + Descent::ChildSummary(i) => node.children[*i].clone(), + Descent::MySummary => structure::Childhood { node: node.clone(), offset: addr::unit::NULL, }, @@ -1326,37 +1319,37 @@ impl TokenizerDescent { fn depth_change(&self) -> usize { match self { - TokenizerDescent::Child(_) | TokenizerDescent::ChildSummary(_) => 1, - TokenizerDescent::MySummary => 0, + Descent::Child(_) | Descent::ChildSummary(_) => 1, + Descent::MySummary => 0, } } - fn before_state(&self, stack_entry: &TokenizerStackEntry) -> TokenizerState { + fn before_state(&self, stack_entry: &StackEntry) -> PositionState { match self { - TokenizerDescent::Child(i) => TokenizerState::MetaContent(stack_entry.node.children[*i].offset, *i), - TokenizerDescent::ChildSummary(i) => TokenizerState::SummaryLabel(*i), - TokenizerDescent::MySummary => TokenizerState::SummaryPreamble, + Descent::Child(i) => PositionState::MetaContent(stack_entry.node.children[*i].offset, *i), + Descent::ChildSummary(i) => PositionState::SummaryLabel(*i), + Descent::MySummary => PositionState::SummaryPreamble, } } - fn after_state(&self, stack_entry: &TokenizerStackEntry) -> TokenizerState { + fn after_state(&self, stack_entry: &StackEntry) -> PositionState { match self { - TokenizerDescent::Child(i) => TokenizerState::MetaContent(stack_entry.node.children[*i].end(), *i+1), - TokenizerDescent::ChildSummary(i) => TokenizerState::SummarySeparator(*i), - TokenizerDescent::MySummary => TokenizerState::SummaryEpilogue, + Descent::Child(i) => 
PositionState::MetaContent(stack_entry.node.children[*i].end(), *i+1), + Descent::ChildSummary(i) => PositionState::SummarySeparator(*i), + Descent::MySummary => PositionState::SummaryEpilogue, } } fn build_path(&self, path: &mut structure::Path) { match self { - TokenizerDescent::Child(i) | TokenizerDescent::ChildSummary(i) => path.push(*i), - TokenizerDescent::MySummary => {}, + Descent::Child(i) | Descent::ChildSummary(i) => path.push(*i), + Descent::MySummary => {}, } } } -impl TokenizerStackEntry { - fn build_path(entry: &Option>, path: &mut structure::Path) { +impl StackEntry { + fn build_path(entry: &Option>, path: &mut structure::Path) { if let Some(tse) = entry { Self::build_path(&tse.stack, path); tse.descent.build_path(path); @@ -1364,8 +1357,8 @@ impl TokenizerStackEntry { } } -impl TokenizerState { - /// Returns whether or not this state represents a state that should only exist within a TokenizerDescent::MySummary +impl PositionState { + /// Returns whether or not this state represents a state that should only exist within a Descent::MySummary /// stack entry. fn is_summary(&self) -> bool { match self { @@ -1384,7 +1377,7 @@ impl TokenizerState { } } -impl PartialEq for TokenizerStackEntry { +impl PartialEq for StackEntry { fn eq(&self, other: &Self) -> bool { (match (&self.stack, &other.stack) { (Some(x), Some(y)) => sync::Arc::ptr_eq(x, y), @@ -1397,7 +1390,7 @@ impl PartialEq for TokenizerStackEntry { } } -impl Eq for TokenizerStackEntry { +impl Eq for StackEntry { } impl PortStackState { @@ -1491,7 +1484,7 @@ impl PortStackState { match self.mode { PortStackMode::Normal => { /* Need to insert MySummary */ - self.push_descent(TokenizerDescent::MySummary); + self.push_descent(Descent::MySummary); self.mode = PortStackMode::Summary; }, PortStackMode::Summary => { @@ -1504,7 +1497,7 @@ impl PortStackState { } } - fn push_descent(&mut self, descent: TokenizerDescent) { + fn push_descent(&mut self, descent: Descent) { descent.build_path(&mut self.current_path); match &mut self.mode { @@ -1512,7 +1505,7 @@ impl PortStackState { let childhood = descent.childhood(&self.node); let parent_node = std::mem::replace(&mut self.node, childhood.node); - let tse = TokenizerStackEntry { + let tse = StackEntry { stack: self.new_stack.take(), descent, apparent_depth: self.apparent_depth, @@ -1556,8 +1549,8 @@ impl PortStackState { fn push(&mut self, child: usize) { let descent = match self.node.props.children_display { structure::ChildrenDisplay::None => todo!(), - structure::ChildrenDisplay::Summary => { self.summarize(); TokenizerDescent::ChildSummary(child) } - structure::ChildrenDisplay::Full => TokenizerDescent::Child(child), + structure::ChildrenDisplay::Summary => { self.summarize(); Descent::ChildSummary(child) } + structure::ChildrenDisplay::Full => Descent::Child(child), }; self.push_descent(descent); @@ -1571,8 +1564,8 @@ mod cmp { #[derive(PartialEq)] enum StateOrDescent { - State(super::TokenizerState), - Descent(super::TokenizerDescent), + State(super::PositionState), + Descent(super::Descent), } impl std::cmp::PartialOrd for StateOrDescent { @@ -1583,41 +1576,41 @@ mod cmp { (Self::Descent(_), Self::State(_)) => Self::partial_cmp(other, self).map(std::cmp::Ordering::reverse), (Self::State(x), Self::Descent(y)) => { let child_index = match y { - super::TokenizerDescent::Child(i) => i, - super::TokenizerDescent::ChildSummary(i) => i, - super::TokenizerDescent::MySummary => return Some(match x { - super::TokenizerState::PreBlank => std::cmp::Ordering::Less, - 
super::TokenizerState::Title => std::cmp::Ordering::Less, - super::TokenizerState::SummaryPreamble => std::cmp::Ordering::Less, - super::TokenizerState::SummaryEpilogue => std::cmp::Ordering::Greater, - super::TokenizerState::PostBlank => std::cmp::Ordering::Greater, - super::TokenizerState::End => std::cmp::Ordering::Greater, + super::Descent::Child(i) => i, + super::Descent::ChildSummary(i) => i, + super::Descent::MySummary => return Some(match x { + super::PositionState::PreBlank => std::cmp::Ordering::Less, + super::PositionState::Title => std::cmp::Ordering::Less, + super::PositionState::SummaryPreamble => std::cmp::Ordering::Less, + super::PositionState::SummaryEpilogue => std::cmp::Ordering::Greater, + super::PositionState::PostBlank => std::cmp::Ordering::Greater, + super::PositionState::End => std::cmp::Ordering::Greater, _ => return None, }), }; Some(match x { - super::TokenizerState::PreBlank => std::cmp::Ordering::Less, - super::TokenizerState::Title => std::cmp::Ordering::Less, - super::TokenizerState::MetaContent(_, i) if i <= child_index => std::cmp::Ordering::Less, - super::TokenizerState::MetaContent(_, _) => std::cmp::Ordering::Greater, - super::TokenizerState::Hexdump { index, .. } if index == child_index => std::cmp::Ordering::Less, - super::TokenizerState::Hexdump { index, .. } => index.cmp(child_index), - super::TokenizerState::Hexstring(_, i) if i == child_index => std::cmp::Ordering::Less, - super::TokenizerState::Hexstring(_, i) => i.cmp(child_index), - super::TokenizerState::SummaryPreamble => std::cmp::Ordering::Less, - super::TokenizerState::SummaryOpener => std::cmp::Ordering::Less, - super::TokenizerState::SummaryLabel(i) if i == child_index => std::cmp::Ordering::Less, - super::TokenizerState::SummaryLabel(i) => i.cmp(child_index), - super::TokenizerState::SummarySeparator(i) if i == child_index => std::cmp::Ordering::Greater, - super::TokenizerState::SummarySeparator(i) => i.cmp(child_index), - super::TokenizerState::SummaryCloser => std::cmp::Ordering::Greater, - super::TokenizerState::SummaryEpilogue => std::cmp::Ordering::Greater, - super::TokenizerState::SummaryValueBegin => std::cmp::Ordering::Less, - super::TokenizerState::SummaryValueEnd => std::cmp::Ordering::Greater, - super::TokenizerState::SummaryLeaf => return None, - super::TokenizerState::PostBlank => std::cmp::Ordering::Greater, - super::TokenizerState::End => std::cmp::Ordering::Greater, + super::PositionState::PreBlank => std::cmp::Ordering::Less, + super::PositionState::Title => std::cmp::Ordering::Less, + super::PositionState::MetaContent(_, i) if i <= child_index => std::cmp::Ordering::Less, + super::PositionState::MetaContent(_, _) => std::cmp::Ordering::Greater, + super::PositionState::Hexdump { index, .. } if index == child_index => std::cmp::Ordering::Less, + super::PositionState::Hexdump { index, .. 
} => index.cmp(child_index), + super::PositionState::Hexstring(_, i) if i == child_index => std::cmp::Ordering::Less, + super::PositionState::Hexstring(_, i) => i.cmp(child_index), + super::PositionState::SummaryPreamble => std::cmp::Ordering::Less, + super::PositionState::SummaryOpener => std::cmp::Ordering::Less, + super::PositionState::SummaryLabel(i) if i == child_index => std::cmp::Ordering::Less, + super::PositionState::SummaryLabel(i) => i.cmp(child_index), + super::PositionState::SummarySeparator(i) if i == child_index => std::cmp::Ordering::Greater, + super::PositionState::SummarySeparator(i) => i.cmp(child_index), + super::PositionState::SummaryCloser => std::cmp::Ordering::Greater, + super::PositionState::SummaryEpilogue => std::cmp::Ordering::Greater, + super::PositionState::SummaryValueBegin => std::cmp::Ordering::Less, + super::PositionState::SummaryValueEnd => std::cmp::Ordering::Greater, + super::PositionState::SummaryLeaf => return None, + super::PositionState::PostBlank => std::cmp::Ordering::Greater, + super::PositionState::End => std::cmp::Ordering::Greater, }) } } @@ -1625,7 +1618,7 @@ mod cmp { } pub struct Item { - stack: Option>, + stack: Option>, sod: StateOrDescent, node: sync::Arc, logical_depth: usize, @@ -1637,8 +1630,8 @@ mod cmp { } } - impl From<&super::Tokenizer> for Item { - fn from(t: &super::Tokenizer) -> Self { + impl From<&super::Position> for Item { + fn from(t: &super::Position) -> Self { Self { stack: t.stack.clone(), sod: StateOrDescent::State(t.state.clone()), @@ -1648,8 +1641,8 @@ mod cmp { } } - impl From<&super::TokenizerStackEntry> for Item { - fn from(t: &super::TokenizerStackEntry) -> Self { + impl From<&super::StackEntry> for Item { + fn from(t: &super::StackEntry) -> Self { Self { stack: t.stack.clone(), sod: StateOrDescent::Descent(t.descent.clone()), @@ -1702,38 +1695,38 @@ mod cmp { Postamble, } - pub fn state_tuple(state: &super::TokenizerState) -> (StateGroup, usize, usize, addr::Address, usize) { + pub fn state_tuple(state: &super::PositionState) -> (StateGroup, usize, usize, addr::Address, usize) { match state { - super::TokenizerState::PreBlank => (StateGroup::Preamble, 0, 0, addr::unit::NULL, 0), - super::TokenizerState::Title => (StateGroup::Preamble, 1, 0, addr::unit::NULL, 0), - super::TokenizerState::SummaryValueBegin => (StateGroup::Preamble, 2, 0, addr::unit::NULL, 0), + super::PositionState::PreBlank => (StateGroup::Preamble, 0, 0, addr::unit::NULL, 0), + super::PositionState::Title => (StateGroup::Preamble, 1, 0, addr::unit::NULL, 0), + super::PositionState::SummaryValueBegin => (StateGroup::Preamble, 2, 0, addr::unit::NULL, 0), - super::TokenizerState::MetaContent(addr, index) => (StateGroup::NormalContent, 0, *index, *addr, 0), - super::TokenizerState::Hexdump { extent, line_extent: _, index } => (StateGroup::NormalContent, 0, *index, extent.begin, 1), - super::TokenizerState::Hexstring(extent, index) => (StateGroup::NormalContent, 0, *index, extent.begin, 1), - super::TokenizerState::SummaryPreamble => (StateGroup::SummaryContent, 0, 0, addr::unit::NULL, 0), - super::TokenizerState::SummaryOpener => (StateGroup::SummaryContent, 1, 0, addr::unit::NULL, 0), - super::TokenizerState::SummaryLabel(x) => (StateGroup::SummaryContent, 2, 2*x, addr::unit::NULL, 0), - super::TokenizerState::SummarySeparator(x) => (StateGroup::SummaryContent, 2, 2*x+1, addr::unit::NULL, 0), - super::TokenizerState::SummaryCloser => (StateGroup::SummaryContent, 3, 0, addr::unit::NULL, 0), - super::TokenizerState::SummaryEpilogue => 
(StateGroup::SummaryContent, 4, 0, addr::unit::NULL, 0), + super::PositionState::MetaContent(addr, index) => (StateGroup::NormalContent, 0, *index, *addr, 0), + super::PositionState::Hexdump { extent, line_extent: _, index } => (StateGroup::NormalContent, 0, *index, extent.begin, 1), + super::PositionState::Hexstring(extent, index) => (StateGroup::NormalContent, 0, *index, extent.begin, 1), + super::PositionState::SummaryPreamble => (StateGroup::SummaryContent, 0, 0, addr::unit::NULL, 0), + super::PositionState::SummaryOpener => (StateGroup::SummaryContent, 1, 0, addr::unit::NULL, 0), + super::PositionState::SummaryLabel(x) => (StateGroup::SummaryContent, 2, 2*x, addr::unit::NULL, 0), + super::PositionState::SummarySeparator(x) => (StateGroup::SummaryContent, 2, 2*x+1, addr::unit::NULL, 0), + super::PositionState::SummaryCloser => (StateGroup::SummaryContent, 3, 0, addr::unit::NULL, 0), + super::PositionState::SummaryEpilogue => (StateGroup::SummaryContent, 4, 0, addr::unit::NULL, 0), - super::TokenizerState::SummaryLeaf => (StateGroup::SummaryLeaf, 0, 0, addr::unit::NULL, 0), + super::PositionState::SummaryLeaf => (StateGroup::SummaryLeaf, 0, 0, addr::unit::NULL, 0), - super::TokenizerState::SummaryValueEnd => (StateGroup::Postamble, 0, 0, addr::unit::NULL, 0), - super::TokenizerState::PostBlank => (StateGroup::Postamble, 1, 0, addr::unit::NULL, 0), - super::TokenizerState::End => (StateGroup::Postamble, 2, 0, addr::unit::NULL, 0), + super::PositionState::SummaryValueEnd => (StateGroup::Postamble, 0, 0, addr::unit::NULL, 0), + super::PositionState::PostBlank => (StateGroup::Postamble, 1, 0, addr::unit::NULL, 0), + super::PositionState::End => (StateGroup::Postamble, 2, 0, addr::unit::NULL, 0), } } } -impl std::cmp::PartialOrd for Tokenizer { +impl std::cmp::PartialOrd for Position { fn partial_cmp(&self, other: &Self) -> Option { cmp::partial_cmp(cmp::Item::from(self), cmp::Item::from(other)) } } -impl std::cmp::PartialOrd for TokenizerState { +impl std::cmp::PartialOrd for PositionState { fn partial_cmp(&self, other: &Self) -> Option { let st1 = cmp::state_tuple(self); let st2 = cmp::state_tuple(other); @@ -1753,7 +1746,7 @@ impl std::cmp::PartialOrd for TokenizerState { } } -impl std::cmp::PartialOrd for TokenizerDescent { +impl std::cmp::PartialOrd for Descent { fn partial_cmp(&self, other: &Self) -> Option { match (self, other) { (Self::Child(x), Self::Child(y)) => Some(x.cmp(y)), @@ -1941,49 +1934,49 @@ pub mod xml { } } -impl AbstractTokenizer for Tokenizer { +impl AbstractPosition for Position { fn at_beginning(root: sync::Arc) -> Self { - Tokenizer::at_beginning(root) + Position::at_beginning(root) } fn at_path(root: sync::Arc, path: &structure::Path, offset: addr::Address) -> Self { - Tokenizer::at_path(root, path, offset) + Position::at_path(root, path, offset) } fn port_change(&mut self, new_doc: &sync::Arc, change: &document::change::Change) { - Tokenizer::port_change(self, &new_doc.root, change, &mut PortOptions::default()); + Position::port_change(self, &new_doc.root, change, &mut PortOptions::default()); } fn hit_top(&self) -> bool { - Tokenizer::hit_top(self) + Position::hit_top(self) } fn hit_bottom(&self) -> bool { - Tokenizer::hit_bottom(self) + Position::hit_bottom(self) } fn gen_token(&self) -> TokenGenerationResult { - Tokenizer::gen_token(self) + Position::gen_token(self) } fn move_prev(&mut self) -> bool { - Tokenizer::move_prev(self) + Position::move_prev(self) } fn move_next(&mut self) -> bool { - Tokenizer::move_next(self) + Position::move_next(self) } fn 
next_postincrement(&mut self) -> Option { - Tokenizer::next_postincrement(self) + Position::next_postincrement(self) } fn prev(&mut self) -> Option { - Tokenizer::prev(self) + Position::prev(self) } fn in_summary(&self) -> bool { - Tokenizer::in_summary(self) + Position::in_summary(self) } } @@ -1997,10 +1990,10 @@ mod tests { use crate::model::document; use crate::model::versioned::Versioned; - struct DownwardTokenizerIterator(Tokenizer); - struct UpwardTokenizerIterator(Tokenizer); + struct DownwardPositionIterator(Position); + struct UpwardPositionIterator(Position); - impl iter::Iterator for DownwardTokenizerIterator { + impl iter::Iterator for DownwardPositionIterator { type Item = token::Token; fn next(&mut self) -> Option { @@ -2017,7 +2010,7 @@ mod tests { } } - impl iter::Iterator for UpwardTokenizerIterator { + impl iter::Iterator for UpwardPositionIterator { type Item = token::Token; fn next(&mut self) -> Option { @@ -2046,104 +2039,104 @@ mod tests { fn test_forward(tc: &xml::Testcase) { itertools::assert_equal( tc.expected_tokens.iter().map(|x| x.clone()), - &mut DownwardTokenizerIterator(Tokenizer::at_beginning(tc.structure.clone()))); + &mut DownwardPositionIterator(Position::at_beginning(tc.structure.clone()))); } fn test_backward(tc: &xml::Testcase) { itertools::assert_equal( tc.expected_tokens.iter().rev().map(|x| x.clone()), - &mut UpwardTokenizerIterator(Tokenizer::at_end(&tc.structure))); + &mut UpwardPositionIterator(Position::at_end(&tc.structure))); } - fn test_cmp(mut tokenizer: Tokenizer) { - let mut prev = vec![tokenizer.clone()]; - while tokenizer.move_next() { + fn test_cmp(mut position: Position) { + let mut prev = vec![position.clone()]; + while position.move_next() { for p in &prev { - let ordering = p.partial_cmp(&tokenizer); + let ordering = p.partial_cmp(&position); if ordering != Some(std::cmp::Ordering::Less) { - panic!("comparing {:?} to {:?} resulted in incorrect ordering {:?}", p, tokenizer, ordering); + panic!("comparing {:?} to {:?} resulted in incorrect ordering {:?}", p, position, ordering); } } - prev.push(tokenizer.clone()); + prev.push(position.clone()); } } #[test] fn simple() { - let tc = parse_testcase(include_bytes!("tokenizer_tests/simple.xml")); + let tc = parse_testcase(include_bytes!("stream_tests/simple.xml")); test_forward(&tc); test_backward(&tc); } #[test] fn simple_cmp() { - let tc = parse_testcase(include_bytes!("tokenizer_tests/simple.xml")); - test_cmp(Tokenizer::at_beginning(tc.structure.clone())); + let tc = parse_testcase(include_bytes!("stream_tests/simple.xml")); + test_cmp(Position::at_beginning(tc.structure.clone())); } #[test] fn nesting() { - let tc = parse_testcase(include_bytes!("tokenizer_tests/nesting.xml")); + let tc = parse_testcase(include_bytes!("stream_tests/nesting.xml")); test_forward(&tc); test_backward(&tc); } #[test] fn nesting_cmp() { - let tc = parse_testcase(include_bytes!("tokenizer_tests/nesting.xml")); - test_cmp(Tokenizer::at_beginning(tc.structure.clone())); + let tc = parse_testcase(include_bytes!("stream_tests/nesting.xml")); + test_cmp(Position::at_beginning(tc.structure.clone())); } #[test] fn formatting() { - let tc = parse_testcase(include_bytes!("tokenizer_tests/formatting.xml")); + let tc = parse_testcase(include_bytes!("stream_tests/formatting.xml")); test_forward(&tc); test_backward(&tc); } #[test] fn content_display() { - let tc = parse_testcase(include_bytes!("tokenizer_tests/content_display.xml")); + let tc = parse_testcase(include_bytes!("stream_tests/content_display.xml")); 
test_forward(&tc); test_backward(&tc); } #[test] fn summary() { - let tc = parse_testcase(include_bytes!("tokenizer_tests/summary.xml")); + let tc = parse_testcase(include_bytes!("stream_tests/summary.xml")); test_forward(&tc); test_backward(&tc); } #[test] fn summary_cmp() { - let tc = parse_testcase(include_bytes!("tokenizer_tests/summary.xml")); - test_cmp(Tokenizer::at_beginning(tc.structure.clone())); + let tc = parse_testcase(include_bytes!("stream_tests/summary.xml")); + test_cmp(Position::at_beginning(tc.structure.clone())); } - fn seek_to_token(tokenizer: &mut Tokenizer, target: &token::Token) { - while match tokenizer.gen_token() { + fn seek_to_token(position: &mut Position, target: &token::Token) { + while match position.gen_token() { TokenGenerationResult::Ok(token) => &token != target, TokenGenerationResult::Skip => true, TokenGenerationResult::Boundary => panic!("couldn't find token"), } { - if !tokenizer.move_next() { + if !position.move_next() { panic!("hit end of token stream"); } } } - fn peek(tokenizer: &mut Tokenizer) -> token::Token { + fn peek(position: &mut Position) -> token::Token { loop { - match tokenizer.gen_token() { + match position.gen_token() { TokenGenerationResult::Ok(token) => return token, - TokenGenerationResult::Skip => assert!(tokenizer.move_next()), + TokenGenerationResult::Skip => assert!(position.move_next()), TokenGenerationResult::Boundary => panic!("couldn't find token"), } } } - fn assert_tokenizers_eq(a: &Tokenizer, b: &Tokenizer) { + fn assert_positions_eq(a: &Position, b: &Position) { assert_eq!(a.state, b.state); assert_eq!(a.apparent_depth, b.apparent_depth); assert_eq!(a.logical_depth, b.logical_depth); @@ -2200,32 +2193,32 @@ mod tests { .size(0x4)) .build(); - let tok = Tokenizer::at_path(root.clone(), &vec![1, 1], 0x10.into()); + let tok = Position::at_path(root.clone(), &vec![1, 1], 0x10.into()); - assert_eq!(tok, Tokenizer { - stack: Some(sync::Arc::new(TokenizerStackEntry { - stack: Some(sync::Arc::new(TokenizerStackEntry { - stack: Some(sync::Arc::new(TokenizerStackEntry { + assert_eq!(tok, Position { + stack: Some(sync::Arc::new(StackEntry { + stack: Some(sync::Arc::new(StackEntry { + stack: Some(sync::Arc::new(StackEntry { stack: None, - descent: TokenizerDescent::Child(1), + descent: Descent::Child(1), apparent_depth: 0, logical_depth: 0, node: root.clone(), node_addr: 0x0.into(), })), - descent: TokenizerDescent::MySummary, + descent: Descent::MySummary, apparent_depth: 1, logical_depth: 1, node: root.children[1].node.clone(), node_addr: 0x14.into(), })), - descent: TokenizerDescent::ChildSummary(1), + descent: Descent::ChildSummary(1), apparent_depth: 1, logical_depth: 2, /* ! 
*/ node: root.children[1].node.clone(), node_addr: 0x14.into(), })), - state: TokenizerState::SummaryLeaf, + state: PositionState::SummaryLeaf, apparent_depth: 2, logical_depth: 3, node: root.children[1].node.children[1].node.clone(), @@ -2234,32 +2227,32 @@ mod tests { } fn assert_port_functionality(old_doc: &document::Document, new_doc: &document::Document, records: &[(token::Token, token::Token, PortOptions, PortOptions)]) { - let mut tokenizers: Vec<(Tokenizer, &token::Token, &token::Token, &PortOptions, &PortOptions)> = records.iter().map( + let mut positions: Vec<(Position, &token::Token, &token::Token, &PortOptions, &PortOptions)> = records.iter().map( |(before_token, after_token, before_options, after_options)| ( - Tokenizer::at_beginning(old_doc.root.clone()), + Position::at_beginning(old_doc.root.clone()), before_token, after_token, before_options, after_options) ).collect(); - for (tokenizer, before_token, _after_token, _, _) in tokenizers.iter_mut() { - seek_to_token(tokenizer, before_token); + for (position, before_token, _after_token, _, _) in positions.iter_mut() { + seek_to_token(position, before_token); } - for (tokenizer, _before_token, after_token, options_before, options_after) in tokenizers.iter_mut() { - println!("tokenizer before port: {:#?}", tokenizer); + for (position, _before_token, after_token, options_before, options_after) in positions.iter_mut() { + println!("position before port: {:#?}", position); let mut options = options_before.clone(); - new_doc.changes_since_ref(old_doc, &mut |doc, change| tokenizer.port_change(&doc.root, change, &mut options)); - println!("tokenizer after port: {:#?}", tokenizer); + new_doc.changes_since_ref(old_doc, &mut |doc, change| position.port_change(&doc.root, change, &mut options)); + println!("position after port: {:#?}", position); - assert_eq!(&peek(tokenizer), *after_token); + assert_eq!(&peek(position), *after_token); assert_eq!(&options, *options_after); - /* Check that the ported tokenizer is the same as if we had created a new tokenizer and seeked it (if only we knew where to seek it to...), i.e. its internal state isn't corrupted in a way that doesn't happen during normal tokenizer movement. */ - let mut clean_tokenizer = Tokenizer::at_beginning(new_doc.root.clone()); - seek_to_token(&mut clean_tokenizer, after_token); - assert_tokenizers_eq(&tokenizer, &clean_tokenizer); + /* Check that the ported position is the same as if we had created a new position and seeked it (if only we knew where to seek it to...), i.e. its internal state isn't corrupted in a way that doesn't happen during normal position movement. 
*/ + let mut clean_position = Position::at_beginning(new_doc.root.clone()); + seek_to_token(&mut clean_position, after_token); + assert_positions_eq(&position, &clean_position); } } diff --git a/src/logic/tokenizer_tests/content_display.xml b/src/model/listing/stream_tests/content_display.xml similarity index 100% rename from src/logic/tokenizer_tests/content_display.xml rename to src/model/listing/stream_tests/content_display.xml diff --git a/src/logic/tokenizer_tests/formatting.xml b/src/model/listing/stream_tests/formatting.xml similarity index 100% rename from src/logic/tokenizer_tests/formatting.xml rename to src/model/listing/stream_tests/formatting.xml diff --git a/src/logic/tokenizer_tests/nesting.xml b/src/model/listing/stream_tests/nesting.xml similarity index 100% rename from src/logic/tokenizer_tests/nesting.xml rename to src/model/listing/stream_tests/nesting.xml diff --git a/src/logic/tokenizer_tests/simple.xml b/src/model/listing/stream_tests/simple.xml similarity index 100% rename from src/logic/tokenizer_tests/simple.xml rename to src/model/listing/stream_tests/simple.xml diff --git a/src/logic/tokenizer_tests/summary.xml b/src/model/listing/stream_tests/summary.xml similarity index 100% rename from src/logic/tokenizer_tests/summary.xml rename to src/model/listing/stream_tests/summary.xml diff --git a/src/model/listing/window.rs b/src/model/listing/window.rs index d06fb81..8fe1736 100644 --- a/src/model/listing/window.rs +++ b/src/model/listing/window.rs @@ -6,9 +6,9 @@ use crate::model::addr; use crate::model::document; use crate::model::document::structure; use crate::model::listing::line; +use crate::model::listing::stream; use crate::model::listing::token; use crate::model::versioned::Versioned; -use crate::logic::tokenizer; pub trait LineView { fn from_line(line: line::Line) -> Self; @@ -19,10 +19,10 @@ pub trait LineView { /// A listing window with a fixed height. Useful for scrolling by lines. /// It is up to the user to ensure that this window is properly notified of structure invalidation events.
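///
/// A minimal setup sketch (illustrative only; `doc` is a hypothetical
/// `sync::Arc<document::Document>` and `L` is a hypothetical type implementing
/// the `LineView` trait above):
///
/// ```ignore
/// let window = Window::<L>::new(doc.clone());
/// for line_view in &window.line_views {
///     /* one entry per visible line, once the window has been populated */
/// }
/// ```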
#[derive(Clone)] -pub struct Window { +pub struct Window { pub current_document: sync::Arc, - top: Tokenizer, - bottom: Tokenizer, + top: Position, + bottom: Position, pub line_views: collections::VecDeque, pub window_height: usize, @@ -30,11 +30,11 @@ pub struct Window Window { - pub fn new(doc: sync::Arc) -> Window { +impl Window { + pub fn new(doc: sync::Arc) -> Window { Window { - top: Tokenizer::at_beginning(doc.root.clone()), - bottom: Tokenizer::at_beginning(doc.root.clone()), + top: Position::at_beginning(doc.root.clone()), + bottom: Position::at_beginning(doc.root.clone()), current_document: doc, @@ -50,14 +50,14 @@ impl Window, path: &structure::Path, offset: addr::Address) -> usize { self.current_document = document; let root = self.current_document.root.clone(); - self.repopulate_window(move |tok, _| *tok = Tokenizer::at_path(root, path, offset)) + self.repopulate_window(move |tok, _| *tok = Position::at_path(root, path, offset)) } - fn repopulate_window(&mut self, tokenizer_provider: F) -> usize where - F: FnOnce(&mut Tokenizer, &mut sync::Arc) { + fn repopulate_window(&mut self, position_provider: F) -> usize where + F: FnOnce(&mut Position, &mut sync::Arc) { self.bottom = self.top.clone(); - tokenizer_provider(&mut self.bottom, &mut self.current_document); - let (first_line, top, _index) = line::Line::containing_tokenizer(&mut self.bottom); + position_provider(&mut self.bottom, &mut self.current_document); + let (first_line, top, _index) = line::Line::containing_position(&mut self.bottom); self.top = top; self.line_views.clear(); @@ -90,7 +90,7 @@ impl Window Window(window: &Window) { + fn print_lines(window: &Window) { for l in &window.line_views { print!(" "); for t in l.iter_tokens() { @@ -378,7 +378,7 @@ mod tests { } #[test] - fn containing_tokenizer() { + fn containing_position() { let root = structure::Node::builder() .name("root") .size(0x40) @@ -407,30 +407,30 @@ mod tests { .build(); /* Pregenerate all the lines from a simple forward walk through the whole document. 
*/ - let mut tokenizer = tokenizer::Tokenizer::at_beginning(root.clone()); + let mut position = stream::Position::at_beginning(root.clone()); let mut lines = vec![]; loop { - let mut begin = tokenizer.clone(); + let mut begin = position.clone(); begin.canonicalize_next(); - let line = line::Line::next_from_tokenizer(&mut tokenizer); + let line = line::Line::next_from_position(&mut position); if line.is_empty() { break; } - let mut end = tokenizer.clone(); + let mut end = position.clone(); end.canonicalize_next(); lines.push((begin, line, end)); } - let mut tokenizer = tokenizer::Tokenizer::at_beginning(root.clone()); + let mut position = stream::Position::at_beginning(root.clone()); let mut i = 0; loop { - let token = match tokenizer.gen_token() { - tokenizer::TokenGenerationResult::Ok(token) => token, - tokenizer::TokenGenerationResult::Skip => if tokenizer.move_next() { continue } else { break }, - tokenizer::TokenGenerationResult::Boundary => break, + let token = match position.gen_token() { + stream::TokenGenerationResult::Ok(token) => token, + stream::TokenGenerationResult::Skip => if position.move_next() { continue } else { break }, + stream::TokenGenerationResult::Boundary => break, }; let expected_index_in_line = loop { @@ -441,27 +441,27 @@ mod tests { } }; - let mut line_end = tokenizer.clone(); - let (line, mut line_begin, index_in_line) = line::Line::containing_tokenizer(&mut line_end); + let mut line_end = position.clone(); + let (line, mut line_begin, index_in_line) = line::Line::containing_position(&mut line_end); line_begin.canonicalize_next(); line_end.canonicalize_next(); if line != lines[i].1 || index_in_line != expected_index_in_line || line_begin != lines[i].0 || line_end != lines[i].2 { println!("seeked to {:?}", token); println!("line from forward walk : {}", lines[i].1); - println!("line from containing_tokenizer: {}", line); + println!("line from containing_position: {}", line); println!("expected index {}, got index {}", expected_index_in_line, index_in_line); - println!("begin tokenizer [actual] : {:#?}", line_begin); - println!("begin tokenizer [expected]: {:#?}", lines[i].0); + println!("begin position [actual] : {:#?}", line_begin); + println!("begin position [expected]: {:#?}", lines[i].0); - println!("end tokenizer [actual] : {:#?}", line_end); - println!("end tokenizer [expected]: {:#?}", lines[i].2); + println!("end position [actual] : {:#?}", line_end); + println!("end position [expected]: {:#?}", lines[i].2); panic!("mismatched"); } - tokenizer.move_next(); + position.move_next(); } } }
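// A migration sketch (illustrative only; `root` is a hypothetical structure node).
// After this rename, a caller that walked tokens through crate::logic::tokenizer::Tokenizer
// does the same through crate::model::listing::stream::Position:
//
//     use crate::model::listing::stream;
//
//     let mut pos = stream::Position::at_beginning(root);
//     while let Some(token) = pos.next_postincrement() {
//         /* tokens arrive in document order */
//     }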