Commit 5da8316

Fix a few bugs including sub-lexers adding additional newlines when EnsureNL is true.
alecthomas committed Feb 6, 2021
1 parent 2a1e1a1 commit 5da8316
Showing 7 changed files with 56 additions and 34 deletions.
32 changes: 27 additions & 5 deletions formatters/tty_indexed.go
@@ -17,6 +17,20 @@ var c = chroma.MustParseColour

var ttyTables = map[int]*ttyTable{
	8: {
		foreground: map[chroma.Colour]string{
			c("#000000"): "\033[30m", c("#7f0000"): "\033[31m", c("#007f00"): "\033[32m", c("#7f7fe0"): "\033[33m",
			c("#00007f"): "\033[34m", c("#7f007f"): "\033[35m", c("#007f7f"): "\033[36m", c("#e5e5e5"): "\033[37m",
			c("#555555"): "\033[1m\033[30m", c("#ff0000"): "\033[1m\033[31m", c("#00ff00"): "\033[1m\033[32m", c("#ffff00"): "\033[1m\033[33m",
			c("#0000ff"): "\033[1m\033[34m", c("#ff00ff"): "\033[1m\033[35m", c("#00ffff"): "\033[1m\033[36m", c("#ffffff"): "\033[1m\033[37m",
		},
		background: map[chroma.Colour]string{
			c("#000000"): "\033[40m", c("#7f0000"): "\033[41m", c("#007f00"): "\033[42m", c("#7f7fe0"): "\033[43m",
			c("#00007f"): "\033[44m", c("#7f007f"): "\033[45m", c("#007f7f"): "\033[46m", c("#e5e5e5"): "\033[47m",
			c("#555555"): "\033[1m\033[40m", c("#ff0000"): "\033[1m\033[41m", c("#00ff00"): "\033[1m\033[42m", c("#ffff00"): "\033[1m\033[43m",
			c("#0000ff"): "\033[1m\033[44m", c("#ff00ff"): "\033[1m\033[45m", c("#00ffff"): "\033[1m\033[46m", c("#ffffff"): "\033[1m\033[47m",
		},
	},
	16: {
		foreground: map[chroma.Colour]string{
			c("#000000"): "\033[30m", c("#7f0000"): "\033[31m", c("#007f00"): "\033[32m", c("#7f7fe0"): "\033[33m",
			c("#00007f"): "\033[34m", c("#7f007f"): "\033[35m", c("#007f7f"): "\033[36m", c("#e5e5e5"): "\033[37m",
@@ -227,15 +241,11 @@ type indexedTTYFormatter struct
func (c *indexedTTYFormatter) Format(w io.Writer, style *chroma.Style, it chroma.Iterator) (err error) {
	theme := styleToEscapeSequence(c.table, style)
	for token := it(); token != chroma.EOF; token = it() {
		// TODO: Cache token lookups?
		clr, ok := theme[token.Type]
		if !ok {
			clr, ok = theme[token.Type.SubCategory()]
			if !ok {
				clr = theme[token.Type.Category()]
				// if !ok {
				// 	clr = theme[chroma.InheritStyle]
				// }
			}
		}
		if clr != "" {
@@ -249,10 +259,22 @@ func (c *indexedTTYFormatter) Format(w io.Writer, style *chroma.Style, it chroma
	return nil
}

// TTY is an 8-colour terminal formatter.
//
// The Lab colour space is used to map RGB values to the most appropriate index colour.
var TTY = Register("terminal", &indexedTTYFormatter{ttyTables[8]})

// TTY8 is an 8-colour terminal formatter.
//
// The Lab colour space is used to map RGB values to the most appropriate index colour.
var TTY8 = Register("terminal", &indexedTTYFormatter{ttyTables[8]})
var TTY8 = Register("terminal8", &indexedTTYFormatter{ttyTables[8]})

// TTY16 is a 16-colour terminal formatter.
//
// It uses \033[3xm for normal colours and \033[90Xm for bright colours.
//
// The Lab colour space is used to map RGB values to the most appropriate index colour.
var TTY16 = Register("terminal16", &indexedTTYFormatter{ttyTables[16]})

// TTY256 is a 256-colour terminal formatter.
//
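
For context (this snippet is not part of the diff), a minimal usage sketch of the formatter names registered above, assuming chroma's quick.Highlight helper with its documented (writer, source, lexer, formatter, style) signature; "terminal16" selects the new 16-colour formatter, while "terminal" and "terminal8" both resolve to the 8-colour table:

package main

import (
	"log"
	"os"

	"github.com/alecthomas/chroma/quick"
)

func main() {
	src := "package main\n\nfunc main() {}\n"
	// Swap "terminal16" for "terminal" or "terminal8" to compare the
	// 8-colour and 16-colour output on the same source.
	if err := quick.Highlight(os.Stdout, src, "go", "terminal16", "monokai"); err != nil {
		log.Fatal(err)
	}
}
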
2 changes: 1 addition & 1 deletion iterator.go
@@ -4,7 +4,7 @@ import "strings"

// An Iterator across tokens.
//
// nil will be returned at the end of the Token stream.
// EOF will be returned at the end of the Token stream.
//
// If an error occurs within an Iterator, it may propagate this in a panic. Formatters should recover.
type Iterator func() Token
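
As an aside (not part of the commit), a small sketch of the contract the corrected comment describes: callers pull tokens until chroma.EOF and recover from panics raised inside the iterator. The package name and the helper drainTokens are made up for illustration:

package example

import (
	"fmt"

	"github.com/alecthomas/chroma"
)

// drainTokens collects every token from it, converting a panic inside the
// iterator into an error, as the Iterator documentation advises.
func drainTokens(it chroma.Iterator) (tokens []chroma.Token, err error) {
	defer func() {
		if r := recover(); r != nil {
			err = fmt.Errorf("lexer panicked: %v", r)
		}
	}()
	for t := it(); t != chroma.EOF; t = it() {
		tokens = append(tokens, t)
	}
	return tokens, nil
}
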
3 changes: 1 addition & 2 deletions lexers/testdata/arduino.expected
@@ -541,6 +541,5 @@
{"type":"Text","value":" \n "},
{"type":"Punctuation","value":"}"},
{"type":"Text","value":"\n"},
{"type":"Punctuation","value":"}"},
{"type":"Text","value":"\n"}
{"type":"Punctuation","value":"}"}
]
6 changes: 0 additions & 6 deletions lexers/testdata/promql.expected
@@ -15,7 +15,6 @@
{"type":"LiteralString","value":"alertmanager"},
{"type":"Punctuation","value":"\"}"},
{"type":"TextWhitespace","value":"\n\n"},

{"type":"CommentSingle","value":"# Aggregation operators"},
{"type":"TextWhitespace","value":"\n"},
{"type":"Keyword","value":"sum"},
@@ -47,7 +46,6 @@
{"type":"TextWhitespace","value":" "},
{"type":"LiteralNumberInteger","value":"1024"},
{"type":"TextWhitespace","value":"\n\n"},

{"type":"CommentSingle","value":"# Metric with multiple lables and whitespaces"},
{"type":"TextWhitespace","value":"\n"},
{"type":"NameVariable","value":"go_gc_duration_seconds"},
@@ -67,7 +65,6 @@
{"type":"TextWhitespace","value":" "},
{"type":"Punctuation","value":"}"},
{"type":"TextWhitespace","value":"\n\n"},

{"type":"CommentSingle","value":"# Expression and comment"},
{"type":"TextWhitespace","value":"\n"},
{"type":"NameVariable","value":"go_gc_duration_seconds"},
@@ -80,7 +77,6 @@
{"type":"TextWhitespace","value":" "},
{"type":"CommentSingle","value":"# single comment"},
{"type":"TextWhitespace","value":"\n\n"},

{"type":"CommentSingle","value":"# Delta function"},
{"type":"TextWhitespace","value":"\n"},
{"type":"KeywordReserved","value":"delta"},
@@ -96,7 +92,6 @@
{"type":"Punctuation","value":"]"},
{"type":"Operator","value":")"},
{"type":"TextWhitespace","value":"\n\n"},

{"type":"CommentSingle","value":"# Sum with arguments"},
{"type":"TextWhitespace","value":"\n"},
{"type":"Keyword","value":"sum"},
@@ -114,7 +109,6 @@
{"type":"NameVariable","value":"instance_memory_usage_bytes"},
{"type":"Operator","value":")"},
{"type":"TextWhitespace","value":"\n\n"},

{"type":"CommentSingle","value":"# Multi-line with offset"},
{"type":"TextWhitespace","value":"\n"},
{"type":"KeywordReserved","value":"label_replace"},
34 changes: 17 additions & 17 deletions lexers/testdata/stylus.expected
@@ -1,19 +1,19 @@
[
{"type":"NameVariable", "value":"$white"},
{"type":"Text", "value":" "},
{"type":"Operator", "value":"?="},
{"type":"Text", "value":" "},
{"type":"LiteralNumberHex", "value":"#fff"},
{"type":"Text", "value":"\n"},
{"type":"NameVariable","value":"$white"},
{"type":"Text","value":" "},
{"type":"Operator","value":"?="},
{"type":"Text","value":" "},
{"type":"LiteralNumberHex","value":"#fff"},
{"type":"Text","value":"\n"},
{"type":"NameTag","value":"body"},
{"type":"Text", "value":" "},
{"type":"Punctuation", "value":"{"},
{"type":"Text", "value":"\n "},
{"type":"NameProperty", "value":"color"},
{"type":"Punctuation", "value":":"},
{"type":"Text", "value":" "},
{"type":"NameVariable", "value":"$white"},
{"type":"Punctuation", "value":";"},
{"type":"Text", "value":"\n"},
{"type":"Punctuation", "value":"}"}
]
{"type":"Text","value":" "},
{"type":"Punctuation","value":"{"},
{"type":"Text","value":"\n "},
{"type":"NameProperty","value":"color"},
{"type":"Punctuation","value":":"},
{"type":"Text","value":" "},
{"type":"NameVariable","value":"$white"},
{"type":"Punctuation","value":";"},
{"type":"Text","value":"\n"},
{"type":"Punctuation","value":"}"}
]
3 changes: 1 addition & 2 deletions lexers/testdata/tsx.expected
@@ -60,6 +60,5 @@
{"type":"LiteralStringSingle","value":"'root'"},
{"type":"Punctuation","value":"),"},
{"type":"Text","value":"\n"},
{"type":"Punctuation","value":")"},
{"type":"Text","value":"\n"}
{"type":"Punctuation","value":")"}
]
10 changes: 9 additions & 1 deletion regexp.go
@@ -264,6 +264,7 @@ type LexerState struct
	MutatorContext map[interface{}]interface{}
	iteratorStack  []Iterator
	options        *TokeniseOptions
	newlineAdded   bool
}

// Set mutator context.
@@ -278,7 +279,11 @@ func (l *LexerState) Get(key interface{}) interface{} {

// Iterator returns the next Token from the lexer.
func (l *LexerState) Iterator() Token { // nolint: gocognit
	for l.Pos < len(l.Text) && len(l.Stack) > 0 {
	end := len(l.Text)
	if l.newlineAdded {
		end--
	}
	for l.Pos < end && len(l.Stack) > 0 {
		// Exhaust the iterator stack, if any.
		for len(l.iteratorStack) > 0 {
			n := len(l.iteratorStack) - 1
@@ -432,10 +437,13 @@ func (r *RegexLexer) Tokenise(options *TokeniseOptions, text string) (Iterator,
	if options.EnsureLF {
		text = ensureLF(text)
	}
	newlineAdded := false
	if !options.Nested && r.config.EnsureNL && !strings.HasSuffix(text, "\n") {
		text += "\n"
		newlineAdded = true
	}
	state := &LexerState{
		newlineAdded: newlineAdded,
		options:      options,
		Lexer:        r,
		Text:         []rune(text),
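
To see the code paths above in action (a hedged sketch, not taken from the repository's tests), the example below tokenises input that lacks a trailing newline; for a lexer whose config sets EnsureNL, Tokenise appends "\n" internally and records it in the new newlineAdded field so the Iterator loop can stop before the synthetic rune:

package main

import (
	"fmt"
	"log"

	"github.com/alecthomas/chroma"
	"github.com/alecthomas/chroma/lexers"
)

func main() {
	// Input deliberately has no trailing newline.
	it, err := lexers.Get("go").Tokenise(nil, "package main")
	if err != nil {
		log.Fatal(err)
	}
	for token := it(); token != chroma.EOF; token = it() {
		fmt.Printf("%-20s %q\n", token.Type, token.Value)
	}
}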
