
Commit

Fix cursor update behavior when a node is inserted or deleted over (but not AT) the cursor, as can happen now when doing so from a selection
misson20000 committed Apr 28, 2024
1 parent 6b8ff5d commit 1fd28b6
Showing 5 changed files with 107 additions and 31 deletions.
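
The heart of this change: PortOptions::additional_offset becomes an in/out parameter for Tokenizer::port_change. On the way in, it requests an extra offset past the position being ported; on the way out, port_change rewrites it to the leftover offset between the requested position and the beginning of the token the tokenizer actually landed on, letting a cursor reconstruct its exact byte position even when a node was inserted or deleted over (but not at) it. A minimal sketch of the new calling convention, pieced together from the diffs below (the helper function and its arguments are hypothetical):

use crate::logic::tokenizer::{PortOptionsBuilder, Tokenizer};
use crate::model::document;

/* Hypothetical helper: port `tokenizer` across the changes between `old_doc`
 * and `new_doc` while tracking a position 0x4 bytes past its current token. */
fn port_tracking_offset(tokenizer: &mut Tokenizer, old_doc: &document::Document, new_doc: &document::Document) {
    /* Request the extra offset up front. */
    let mut options = PortOptionsBuilder::new().additional_offset(0x4).build();

    /* port_change now takes &mut PortOptions so it can write results back. */
    new_doc.changes_since_ref(old_doc, &mut |doc, change| {
        tokenizer.port_change(&doc.root, change, &mut options);
    });

    /* On return, additional_offset holds the residual offset relative to where
     * the ported token actually begins (None if there was nothing to track). */
    let _residual = options.additional_offset;
}
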
53 changes: 36 additions & 17 deletions src/logic/tokenizer.rs
@@ -124,10 +124,11 @@ impl std::fmt::Debug for TokenizerStackEntry {
     }
 }
 
-#[derive(Clone, Debug, Default)]
+#[non_exhaustive]
+#[derive(Clone, Debug, Default, PartialEq, Eq)]
 pub struct PortOptions {
-    additional_offset: addr::Size,
-    prefer_after_new_node: bool,
+    pub additional_offset: Option<addr::Size>,
+    pub prefer_after_new_node: bool,
 }
 
 pub struct PortOptionsBuilder {
@@ -203,8 +204,8 @@ impl PortOptionsBuilder {
         }
     }
 
-    pub fn additional_offset(mut self, offset: addr::Size) -> PortOptionsBuilder {
-        self.options.additional_offset = offset;
+    pub fn additional_offset<T: Into<addr::Size>>(mut self, offset: T) -> PortOptionsBuilder {
+        self.options.additional_offset = Some(offset.into());
         self
     }

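Note: because additional_offset is now generic over Into<addr::Size>, callers can pass a bare integer literal, as the updated tests below do. A small sketch (assuming addr::Size converts from integer literals, as addr::Size::from(0x30) elsewhere in this commit suggests):

/* Both forms build the same options; the literal goes through Into<addr::Size>. */
let a = PortOptionsBuilder::new().additional_offset(0x4).build();
let b = PortOptionsBuilder::new().additional_offset(addr::Size::from(0x4)).build();
assert_eq!(a, b); /* comparable thanks to the newly derived PartialEq/Eq */
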
@@ -297,7 +298,7 @@ impl Tokenizer {
 
     /// Applies a single change to the tokenizer state.
     #[instrument]
-    pub fn port_change(&mut self, new_root: &sync::Arc<structure::Node>, change: &change::Change, options: &PortOptions) {
+    pub fn port_change(&mut self, new_root: &sync::Arc<structure::Node>, change: &change::Change, options: &mut PortOptions) {
         /* Recreate our stack, processing descents and such, leaving off with some information about the node we're actually on now. */
         let stack_state = match &self.stack {
             Some(parent) => Self::port_recurse(parent, new_root, change),
@@ -413,8 +414,8 @@ impl Tokenizer {
         };
 
         /* Respect the additional offset that the options requested. */
-        if let Some(offset) = offset.as_mut() {
-            *offset = *offset + options.additional_offset;
+        if let (Some(offset), Some(additional_offset)) = (offset.as_mut(), options.additional_offset) {
+            *offset = *offset + additional_offset;
         }
 
         /* Adjust the offset and index. */
@@ -498,7 +499,7 @@ impl Tokenizer {
             /* This should've been handled earlier, but whatever. */
             IntermediatePortState::Finished(finalized_state) => finalized_state,
 
-            IntermediatePortState::NormalContent(offset, index) => TokenizerState::MetaContent(self.estimate_line_begin(offset, index), index),
+            IntermediatePortState::NormalContent(offset, index) => {
+                let line_begin = self.estimate_line_begin(offset, index);
+                /* Output the difference between the requested "additional offset" and where the token actually will begin. */
+                options.additional_offset = offset.map(|offset| offset - line_begin);
+                TokenizerState::MetaContent(line_begin, index)
+            },
 
             /* Real TokenizerState doesn't support one-past-the-end for SummaryLabel and SummarySeparator, so need to fix if that would be the case. */
             IntermediatePortState::SummaryLabel(index) if index < children.len() => TokenizerState::SummaryLabel(index),
@@ -1711,19 +1717,28 @@ mod tests {
         };
     }
 
-    fn assert_port_functionality(old_doc: &document::Document, new_doc: &document::Document, records: &[(token::Token, token::Token, PortOptions)]) {
-        let mut tokenizers: Vec<(Tokenizer, &token::Token, &token::Token, &PortOptions)> = records.iter().map(|(before_token, after_token, options)| (Tokenizer::at_beginning(old_doc.root.clone()), before_token, after_token, options)).collect();
+    fn assert_port_functionality(old_doc: &document::Document, new_doc: &document::Document, records: &[(token::Token, token::Token, PortOptions, PortOptions)]) {
+        let mut tokenizers: Vec<(Tokenizer, &token::Token, &token::Token, &PortOptions, &PortOptions)> = records.iter().map(
+            |(before_token, after_token, before_options, after_options)| (
+                Tokenizer::at_beginning(old_doc.root.clone()),
+                before_token,
+                after_token,
+                before_options,
+                after_options)
+        ).collect();
 
-        for (tokenizer, before_token, _after_token, _) in tokenizers.iter_mut() {
+        for (tokenizer, before_token, _after_token, _, _) in tokenizers.iter_mut() {
             seek_to_token(tokenizer, before_token);
         }
 
-        for (tokenizer, _before_token, after_token, options) in tokenizers.iter_mut() {
+        for (tokenizer, _before_token, after_token, options_before, options_after) in tokenizers.iter_mut() {
             println!("tokenizer before port: {:#?}", tokenizer);
-            new_doc.changes_since_ref(old_doc, &mut |doc, change| tokenizer.port_change(&doc.root, change, options));
+            let mut options = options_before.clone();
+            new_doc.changes_since_ref(old_doc, &mut |doc, change| tokenizer.port_change(&doc.root, change, &mut options));
             println!("tokenizer after port: {:#?}", tokenizer);
 
             assert_eq!(&peek(tokenizer), *after_token);
+            assert_eq!(&options, *options_after);
 
             /* Check that the ported tokenizer is the same as if we had created a new tokenizer and seeked it (if only we knew where to seek it to...), i.e. its internal state isn't corrupted in a way that doesn't happen during normal tokenizer movement. */
             let mut clean_tokenizer = Tokenizer::at_beginning(new_doc.root.clone());
@@ -1749,7 +1764,7 @@ mod tests {
             .child(0x20, |b| b
                    .name("child1.1")
                    .size(0x18))
-            .child(0x30, |b| b
+            .child(0x34, |b| b
                    .name("child1.2")
                    .size(0x18))
             .child(0x48, |b| b
@@ -1762,6 +1777,8 @@ mod tests {
 
         let old_doc = document::Builder::new(root).build();
         let mut new_doc = old_doc.clone();
+
+        /* Delete child1.1 and child1.2. */
         new_doc.change_for_debug(old_doc.delete_range(structure::SiblingRange::new(vec![1], 1, 2))).unwrap();
 
         let (o_child_1_2, o_child_1_2_addr) = old_doc.lookup_node(&vec![1, 2]);
@@ -1790,7 +1807,8 @@ mod tests {
                     extent: addr::Extent::sized_u64(0x40, 0x8),
                     line: addr::Extent::sized_u64(0x40, 0x10)
                 }),
-                PortOptionsBuilder::new().build()
+                PortOptionsBuilder::new().additional_offset(0x4).build(), /* Asking about offset 0x14 into old child1.2 */
+                PortOptionsBuilder::new().additional_offset(0x8).build(), /* Becomes offset 0x48 in new child1 */
             ),
             (
                 token::Token::Hexdump(token::HexdumpToken {
@@ -1814,7 +1832,8 @@ mod tests {
                     extent: addr::Extent::sized_u64(0x10, 0xc),
                     line: addr::Extent::sized_u64(0x10, 0x10),
                 }),
-                PortOptionsBuilder::new().build()
+                PortOptionsBuilder::new().additional_offset(0x4).build(),
+                PortOptionsBuilder::new().additional_offset(0x4).build(),
             ),
         ]);
     }
79 changes: 68 additions & 11 deletions src/model/listing/cursor.rs
@@ -154,22 +154,23 @@ impl Cursor {
             _ => {},
         };
 
-        let options = options.build();
-
+        let mut options = options.build();
+        let mut tokenizer = self.tokenizer.clone();
+
         document.changes_since(&self.document, &mut |document, change| {
-            self.tokenizer.port_change(
+            tokenizer.port_change(
                 &document.root,
                 change,
-                &options);
+                &mut options);
         });
 
-        let mut tokenizer = self.tokenizer.clone();
-
-        let class = match CursorClass::place_forward(&mut tokenizer, self.class.get_addr(), &self.class.get_placement_hint()) {
+        let offset = tokenizer.structure_position_offset() + options.additional_offset.unwrap_or(addr::unit::ZERO);
+
+        let class = match CursorClass::place_forward(&mut tokenizer, offset, &self.class.get_placement_hint()) {
             Ok(cc) => cc,
             Err(PlacementFailure::HitBottomOfAddressSpace) => {
                 tokenizer = self.tokenizer.clone();
-                match CursorClass::place_backward(&mut tokenizer, self.class.get_addr(), &self.class.get_placement_hint()) {
+                match CursorClass::place_backward(&mut tokenizer, offset, &self.class.get_placement_hint()) {
                     Ok(cc) => cc,
                     Err(PlacementFailure::HitTopOfAddressSpace) => panic!("expected to be able to place cursor somewhere"),
                     Err(_) => panic!("unexpected error from CursorClass::place_backward")
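
A worked example of the offset reconstruction above, using the numbers from the tokenizer test earlier in this commit: the cursor sat at offset 0x14 into old child1.2 (absolute address 0x48); after the deletion, the ported token begins at 0x40 in new child1, so port_change writes a residual of 0x8 back into the options, and the cursor recovers its exact position (the concrete values are illustrative):

let offset = tokenizer.structure_position_offset()            /* 0x40, start of the ported token */
    + options.additional_offset.unwrap_or(addr::unit::ZERO);  /* + 0x8, written back by port_change = 0x48 */
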
@@ -283,10 +284,10 @@ impl CursorClass {
     /// one.
     fn new_placement(token: token::Token, offset: addr::Address, hint: &PlacementHint) -> Result<CursorClass, token::Token> {
         match token {
-            token::Token::Title(token) => title::Cursor::new_placement(token, offset, hint).map(CursorClass::Title).map_err(TokenKind::into_token),
+            token::Token::Title(token) => title::Cursor::new_placement(token, hint).map(CursorClass::Title).map_err(TokenKind::into_token),
             token::Token::Hexdump(token) => hexdump::Cursor::new_placement(token, offset, hint).map(CursorClass::Hexdump).map_err(TokenKind::into_token),
-            token::Token::SummaryPunctuation(token) if token.kind.accepts_cursor() => punctuation::Cursor::new_placement(token.into_token(), offset, hint).map(CursorClass::Punctuation),
-            token::Token::BlankLine(token) if token.accepts_cursor => punctuation::Cursor::new_placement(token.into_token(), offset, hint).map(CursorClass::Punctuation),
+            token::Token::SummaryPunctuation(token) if token.kind.accepts_cursor() => punctuation::Cursor::new_placement(token.into_token(), hint).map(CursorClass::Punctuation),
+            token::Token::BlankLine(token) if token.accepts_cursor => punctuation::Cursor::new_placement(token.into_token(), hint).map(CursorClass::Punctuation),
             _ => Err(token)
         }
     }
@@ -593,4 +594,60 @@ mod tests {
         }).as_ref());
         assert_matches!(&cursor.class, CursorClass::Hexdump(hxc) if hxc.offset == addr::unit::ZERO && hxc.low_nybble == false);
     }
+
+    #[test]
+    fn node_insertion_around() {
+        let document_host = sync::Arc::new(document::Builder::default().host());
+        let document = document_host.get();
+        let mut cursor = Cursor::place(document.clone(), &vec![], 0x24.into(), PlacementHint::Unused).unwrap();
+
+        assert_eq!(cursor.class.get_token(), token::Token::Hexdump(token::HexdumpToken {
+            common: token::TokenCommon {
+                node: document.root.clone(),
+                node_path: structure::Path::default(),
+                node_addr: addr::unit::NULL,
+                depth: 1,
+            },
+            extent: addr::Extent::sized(0x20.into(), 0x10.into()),
+            line: addr::Extent::sized(0x20.into(), 0x10.into()),
+        }).as_ref());
+        assert_matches!(&cursor.class, CursorClass::Hexdump(hxc) if hxc.offset == 0x4.into() && hxc.low_nybble == false);
+
+        let node = sync::Arc::new(structure::Node {
+            props: structure::Properties {
+                name: "child".to_string(),
+                title_display: structure::TitleDisplay::Minor,
+                children_display: structure::ChildrenDisplay::Full,
+                content_display: structure::ContentDisplay::Hexdump {
+                    line_pitch: addr::Size::from(16),
+                    gutter_pitch: addr::Size::from(8),
+                },
+                locked: false,
+            },
+            children: vec::Vec::new(),
+            size: addr::Size::from(0x30),
+        });
+
+        let document = document_host.change(document.insert_node(
+            vec![],
+            0,
+            structure::Childhood::new(node.clone(), 0x12.into())
+        )).unwrap();
+
+        /* port the cursor over */
+        cursor.update(&document);
+
+        /* make sure it winds up in the correct place */
+        assert_eq!(cursor.class.get_token(), token::Token::Hexdump(token::HexdumpToken {
+            common: token::TokenCommon {
+                node: node.clone(),
+                node_path: vec![0],
+                node_addr: 0x12.into(),
+                depth: 2,
+            },
+            extent: addr::Extent::sized(0x10.into(), 0x10.into()),
+            line: addr::Extent::sized(0x10.into(), 0x10.into()),
+        }).as_ref());
+        assert_matches!(&cursor.class, CursorClass::Hexdump(hxc) if hxc.offset == 0x2.into() && hxc.low_nybble == false);
+    }
 }
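
The arithmetic behind the final assertion: the cursor starts at absolute address 0x24; the new 0x30-byte node is inserted at 0x12, so the cursor falls 0x24 - 0x12 = 0x12 into it; the hexdump line covering that position spans 0x10..0x20 within the node, so the cursor lands at offset 0x2 into that line's token, which is exactly what the assert_matches checks.
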
2 changes: 1 addition & 1 deletion src/model/listing/cursor/punctuation.rs
@@ -20,7 +20,7 @@ impl Cursor {
         }
     }
 
-    pub fn new_placement(token: token::Token, _offset: addr::Address, hint: &cursor::PlacementHint) -> Result<Cursor, token::Token> {
+    pub fn new_placement(token: token::Token, hint: &cursor::PlacementHint) -> Result<Cursor, token::Token> {
         match hint {
             /* we only place the cursor on punctuation if explicitly requested;
              * otherwise, we prefer to place it on a content token. */
2 changes: 1 addition & 1 deletion src/model/listing/cursor/title.rs
@@ -22,7 +22,7 @@ impl Cursor {
         }
     }
 
-    pub fn new_placement(token: token::TitleToken, _offset: addr::Address, hint: &cursor::PlacementHint) -> Result<Cursor, token::TitleToken> {
+    pub fn new_placement(token: token::TitleToken, hint: &cursor::PlacementHint) -> Result<Cursor, token::TitleToken> {
         match hint {
             /* we only place the cursor on a break header if explicitly requested;
              * otherwise, we prefer to place it on a content token. */
2 changes: 1 addition & 1 deletion src/model/listing/layout.rs
@@ -667,7 +667,7 @@ impl WindowTokenizer for tokenizer::Tokenizer {
     }
 
     fn port_change(&mut self, new_doc: &sync::Arc<document::Document>, change: &document::change::Change) {
-        tokenizer::Tokenizer::port_change(self, &new_doc.root, change, &tokenizer::PortOptions::default());
+        tokenizer::Tokenizer::port_change(self, &new_doc.root, change, &mut tokenizer::PortOptions::default());
     }
 
     fn hit_top(&self) -> bool {
